Merge "Revert^2 "AudioFlinger: do not reset mHwPaused on flush"" into main
diff --git a/Android.bp b/Android.bp
index 72b8721..0c7ed6e 100644
--- a/Android.bp
+++ b/Android.bp
@@ -34,6 +34,24 @@
     ],
 }
 
+aidl_interface_defaults {
+    name: "audio-aidl-defaults",
+    unstable: true,
+    host_supported: true,
+    backend: {
+        cpp: {
+            enabled: true,
+        },
+        java: {
+            enabled: true,
+        },
+        rust: {
+            enabled: true,
+        },
+    },
+
+}
+
 aidl_interface {
     name: "av-types-aidl",
     unstable: true,
@@ -71,6 +89,18 @@
     },
 }
 
+aidl_interface {
+    name: "audio-permission-aidl",
+    // TODO remove
+    vendor_available: true,
+    double_loadable: true,
+    defaults: ["audio-aidl-defaults"],
+    local_include_dir: "aidl",
+    srcs: [
+        "aidl/com/android/media/permission/*",
+    ],
+}
+
 cc_library_headers {
     name: "av-headers",
     export_include_dirs: ["include"],
@@ -130,6 +160,22 @@
             imports: ["android.hardware.audio.core-V2"],
         },
     ],
-    frozen: true,
+    frozen: false,
 
 }
+
+latest_av_audio_types_aidl = "av-audio-types-aidl-V2"
+
+cc_defaults {
+    name: "latest_av_audio_types_aidl_ndk_shared",
+    shared_libs: [
+        latest_av_audio_types_aidl + "-ndk",
+    ],
+}
+
+cc_defaults {
+    name: "latest_av_audio_types_aidl_ndk_static",
+    static_libs: [
+        latest_av_audio_types_aidl + "-ndk",
+    ],
+}
diff --git a/aidl/com/android/media/permission/INativePermissionController.aidl b/aidl/com/android/media/permission/INativePermissionController.aidl
new file mode 100644
index 0000000..a14092d
--- /dev/null
+++ b/aidl/com/android/media/permission/INativePermissionController.aidl
@@ -0,0 +1,46 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.permission;
+
+import com.android.media.permission.PermissionEnum;
+import com.android.media.permission.UidPackageState;
+
+/**
+ * This interface is used by system_server to communicate permission information
+ * downwards towards native services.
+ * {@hide}
+ */
+interface INativePermissionController {
+    /**
+     * Initialize app-ids and their corresponding packages, to be used for package validation.
+     */
+    void populatePackagesForUids(in List<UidPackageState> initialPackageStates);
+    /**
+     * Replace or populate the list of packages associated with a given uid.
+     * If the list is empty, the package no longer exists.
+     */
+    void updatePackagesForUid(in UidPackageState newPackageState);
+    /**
+     * Populate or replace the list of uids which hold a particular permission.
+     * Runtime permissions will need additional checks, and should not use the cache as-is.
+     * Not virtual device aware.
+     * It is possible for updates to the permission state to be delayed during high traffic.
+     * @param perm - Enum representing the permission for which holders are being supplied
+     * @param uids - Uids (not app-ids) which hold the permission. Should be sorted
+     */
+    void populatePermissionState(in PermissionEnum perm, in int[] uids);
+}
diff --git a/aidl/com/android/media/permission/PermissionEnum.aidl b/aidl/com/android/media/permission/PermissionEnum.aidl
new file mode 100644
index 0000000..b08db44
--- /dev/null
+++ b/aidl/com/android/media/permission/PermissionEnum.aidl
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.permission;
+
+/**
+ * Enumerates permissions which are tracked/pushed by NativePermissionController
+ * {@hide}
+ */
+enum PermissionEnum {
+    // This is a runtime + WIU permission, which means data delivery should be protected by AppOps
+    // We query the controller only for early fails/hard errors
+    RECORD_AUDIO = 0,
+    MODIFY_AUDIO_ROUTING = 1,
+    MODIFY_AUDIO_SETTINGS = 2,
+    MODIFY_PHONE_STATE = 3,
+    MODIFY_DEFAULT_AUDIO_EFFECTS = 4,
+    WRITE_SECURE_SETTINGS = 5,
+    CALL_AUDIO_INTERCEPTION = 6,
+    ACCESS_ULTRASOUND = 7,
+    CAPTURE_AUDIO_OUTPUT = 8,
+    CAPTURE_MEDIA_OUTPUT = 9,
+    CAPTURE_AUDIO_HOTWORD = 10,
+    CAPTURE_TUNER_AUDIO_INPUT = 11,
+    CAPTURE_VOICE_COMMUNICATION_OUTPUT = 12,
+    BLUETOOTH_CONNECT = 13,
+    ENUM_SIZE = 14, // Not for actual usage, used by Java
+}
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyForceUse.aidl b/aidl/com/android/media/permission/UidPackageState.aidl
similarity index 65%
rename from media/libaudioclient/aidl/android/media/AudioPolicyForceUse.aidl
rename to aidl/com/android/media/permission/UidPackageState.aidl
index 9bb0605..747a7ef 100644
--- a/media/libaudioclient/aidl/android/media/AudioPolicyForceUse.aidl
+++ b/aidl/com/android/media/permission/UidPackageState.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2021 The Android Open Source Project
+ * Copyright (C) 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,19 +13,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package android.media;
+
+package com.android.media.permission;
 
 /**
+ * Entity representing the package names associated with a particular uid/app-id
  * {@hide}
  */
-@Backing(type="int")
-enum AudioPolicyForceUse {
-    COMMUNICATION = 0,
-    MEDIA = 1,
-    RECORD = 2,
-    DOCK = 3,
-    SYSTEM = 4,
-    HDMI_SYSTEM_AUDIO = 5,
-    ENCODED_SURROUND = 6,
-    VIBRATE_RINGING = 7,
+@JavaDerive(equals = true, toString = true)
+parcelable UidPackageState {
+    int uid;
+    @utf8InCpp List<String> packageNames;
 }
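
For context, a rough C++ sketch of how a system_server-side caller might drive the new permission AIDL surface through the generated cpp-backend bindings. The header paths, namespaces, and generated method signatures below are assumptions based on standard AIDL code generation, and the uid/package values are examples, not code from this change.

#include <com/android/media/permission/INativePermissionController.h>
#include <com/android/media/permission/PermissionEnum.h>
#include <com/android/media/permission/UidPackageState.h>

#include <utils/StrongPointer.h>

#include <vector>

using ::android::binder::Status;
using ::com::android::media::permission::INativePermissionController;
using ::com::android::media::permission::PermissionEnum;
using ::com::android::media::permission::UidPackageState;

// Push an initial package snapshot plus the holders of one permission to a
// native service's controller.
static Status pushInitialState(const android::sp<INativePermissionController>& controller) {
    UidPackageState state;
    state.uid = 10007;                               // example uid
    state.packageNames = {"com.example.recorder"};   // example package list for that uid

    Status status = controller->populatePackagesForUids({state});
    if (!status.isOk()) return status;

    // Uids (not app-ids) holding RECORD_AUDIO; the interface expects a sorted list.
    std::vector<int32_t> recordAudioHolders = {1000, 10007};
    return controller->populatePermissionState(PermissionEnum::RECORD_AUDIO, recordAudioHolders);
}
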
diff --git a/camera/Android.bp b/camera/Android.bp
index d0f8e7e..25b5e2c 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -54,7 +54,13 @@
 cc_aconfig_library {
     name: "camera_platform_flags_c_lib",
     aconfig_declarations: "camera_platform_flags",
+}
+
+cc_aconfig_library {
+    name: "camera_platform_flags_c_lib_for_test",
+    aconfig_declarations: "camera_platform_flags",
     host_supported: true,
+    mode: "test",
 }
 
 java_aconfig_library {
@@ -75,6 +81,7 @@
         local_include_dirs: ["aidl"],
         include_dirs: [
             "frameworks/native/aidl/gui",
+            "frameworks/native/libs/permission/aidl",
         ],
     },
 
@@ -86,54 +93,57 @@
 
         // Source for camera interface parcelables, and manually-written interfaces
         "Camera.cpp",
+        "CameraBase.cpp",
         "CameraMetadata.cpp",
         "CameraParameters.cpp",
-        "CaptureResult.cpp",
         "CameraParameters2.cpp",
         "CameraSessionStats.cpp",
+        "CameraUtils.cpp",
+        "CaptureResult.cpp",
         "ICamera.cpp",
         "ICameraClient.cpp",
         "ICameraRecordingProxy.cpp",
+        "VendorTagDescriptor.cpp",
         "camera2/CaptureRequest.cpp",
         "camera2/ConcurrentCamera.cpp",
         "camera2/OutputConfiguration.cpp",
         "camera2/SessionConfiguration.cpp",
         "camera2/SubmitInfo.cpp",
-        "CameraBase.cpp",
-        "CameraUtils.cpp",
-        "VendorTagDescriptor.cpp",
     ],
 
     shared_libs: [
         "camera_platform_flags_c_lib",
-        "libbase",
-        "libcutils",
-        "libutils",
-        "liblog",
-        "libbinder",
-        "libgui",
-        "libcamera_metadata",
-        "libnativewindow",
+        "framework-permission-aidl-cpp",
         "lib-platform-compat-native-api",
+        "libbase",
+        "libbinder",
+        "libcamera_metadata",
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libnativewindow",
+        "libpermission",
+        "libutils",
     ],
 
     include_dirs: [
-        "system/media/private/camera/include",
         "frameworks/native/include/media/openmax",
+        "system/media/private/camera/include",
     ],
     export_include_dirs: [
         "include",
         "include/camera",
     ],
     export_shared_lib_headers: [
+        "framework-permission-aidl-cpp",
         "libcamera_metadata",
-        "libnativewindow",
         "libgui",
+        "libnativewindow",
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
         "-Wextra",
     ],
 
@@ -153,8 +163,8 @@
     ],
 
     include_dirs: [
-        "system/media/private/camera/include",
         "frameworks/native/include/media/openmax",
+        "system/media/private/camera/include",
     ],
 
     export_include_dirs: [
@@ -168,8 +178,8 @@
     name: "libcamera_client_aidl",
     srcs: [
         "aidl/android/hardware/CameraExtensionSessionStats.aidl",
+        "aidl/android/hardware/CameraFeatureCombinationStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
-        "aidl/android/hardware/CameraIdRemapping.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
         "aidl/android/hardware/camera2/ICameraDeviceCallbacks.aidl",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 5d32871..d90f7c9 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -69,13 +69,12 @@
     // deadlock if we call any method of ICamera here.
 }
 
-sp<Camera> Camera::connect(int cameraId, const std::string& clientPackageName,
-        int clientUid, int clientPid, int targetSdkVersion, bool overrideToPortrait,
-        bool forceSlowJpegMode, int32_t deviceId, int32_t devicePolicy)
+sp<Camera> Camera::connect(int cameraId, int targetSdkVersion, int rotationOverride,
+        bool forceSlowJpegMode, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy)
 {
-    return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode, deviceId,
-            devicePolicy);
+    return CameraBaseT::connect(cameraId, targetSdkVersion, rotationOverride,
+            forceSlowJpegMode, clientAttribution, devicePolicy);
 }
 
 status_t Camera::reconnect()
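
As a usage illustration (not taken from this patch), a caller of the reworked legacy connect path now bundles its identity into an AttributionSourceState instead of passing packageName/uid/pid separately. The package name, target SDK, and field spellings below are assumptions.

#include <android/content/AttributionSourceState.h>
#include <android/hardware/ICameraService.h>
#include <camera/Camera.h>

using ::android::Camera;
using ::android::content::AttributionSourceState;
using ::android::hardware::ICameraService;
using ::android::sp;

sp<Camera> connectLegacyCamera(int cameraId) {
    AttributionSourceState clientAttribution;
    clientAttribution.uid = ICameraService::USE_CALLING_UID;  // let cameraserver resolve the uid
    clientAttribution.pid = ICameraService::USE_CALLING_PID;  // and the pid
    clientAttribution.packageName = "com.example.cameraapp";  // hypothetical package name

    return Camera::connect(cameraId,
                           /*targetSdkVersion=*/34,            // example target SDK
                           ICameraService::ROTATION_OVERRIDE_NONE,
                           /*forceSlowJpegMode=*/false,
                           clientAttribution,
                           /*devicePolicy=*/0);
}
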
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index b2f7cc7..774db25 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -161,10 +161,10 @@
 
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
-                                               const std::string& clientPackageName,
-                                               int clientUid, int clientPid, int targetSdkVersion,
-                                               bool overrideToPortrait, bool forceSlowJpegMode,
-                                               int32_t deviceId, int32_t devicePolicy)
+                                               int targetSdkVersion, int rotationOverride,
+                                               bool forceSlowJpegMode,
+                                               const AttributionSourceState& clientAttribution,
+                                               int32_t devicePolicy)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -174,11 +174,11 @@
     binder::Status ret;
     if (cs != nullptr) {
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
-        ALOGI("Connect camera (legacy API) - overrideToPortrait %d, forceSlowJpegMode %d",
-                overrideToPortrait, forceSlowJpegMode);
-        ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                clientPid, targetSdkVersion, overrideToPortrait, forceSlowJpegMode, deviceId,
-                devicePolicy, /*out*/ &c->mCamera);
+        ALOGI("Connect camera (legacy API) - rotationOverride %d, forceSlowJpegMode %d",
+                rotationOverride, forceSlowJpegMode);
+        ret = (cs.get()->*fnConnectService)(cl, cameraId, targetSdkVersion,
+                rotationOverride, forceSlowJpegMode, clientAttribution, devicePolicy,
+                /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -257,7 +257,8 @@
 }
 
 template <typename TCam, typename TCamTraits>
-int CameraBase<TCam, TCamTraits>::getNumberOfCameras(int32_t deviceId, int32_t devicePolicy) {
+int CameraBase<TCam, TCamTraits>::getNumberOfCameras(
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
 
     if (!cs.get()) {
@@ -266,7 +267,7 @@
     }
     int32_t count;
     binder::Status res = cs->getNumberOfCameras(
-            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, deviceId,
+            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, clientAttribution,
             devicePolicy, &count);
     if (!res.isOk()) {
         ALOGE("Error reading number of cameras: %s",
@@ -279,12 +280,12 @@
 // this can be in BaseCamera but it should be an instance method
 template <typename TCam, typename TCamTraits>
 status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
-        bool overrideToPortrait, int32_t deviceId, int32_t devicePolicy,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         struct hardware::CameraInfo* cameraInfo) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
     if (cs == 0) return UNKNOWN_ERROR;
-    binder::Status res = cs->getCameraInfo(cameraId, overrideToPortrait, deviceId, devicePolicy,
-            cameraInfo);
+    binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, clientAttribution,
+            devicePolicy, cameraInfo);
     return res.isOk() ? OK : res.serviceSpecificErrorCode();
 }
 
diff --git a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
index 1c81831..a3c0e69 100644
--- a/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
+++ b/camera/aidl/android/hardware/CameraExtensionSessionStats.aidl
@@ -66,4 +66,9 @@
      * true if advanced extensions are being used, false otherwise
      */
     boolean isAdvanced = false;
+
+    /**
+     * Format of image capture request
+     */
+    int captureFormat;
 }
\ No newline at end of file
diff --git a/camera/aidl/android/hardware/CameraFeatureCombinationStats.aidl b/camera/aidl/android/hardware/CameraFeatureCombinationStats.aidl
new file mode 100644
index 0000000..f4a11b1
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraFeatureCombinationStats.aidl
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * {@hide}
+ */
+parcelable CameraFeatureCombinationStats {
+    /**
+     * Values for feature combination queries
+     */
+    const long CAMERA_FEATURE_UNKNOWN = 0;
+    const long CAMERA_FEATURE_60_FPS = 1 << 0;
+    const long CAMERA_FEATURE_STABILIZATION = 1 << 1;
+    const long CAMERA_FEATURE_HLG10 = 1 << 2;
+    const long CAMERA_FEATURE_JPEG = 1 << 3;
+    const long CAMERA_FEATURE_JPEG_R = 1 << 4;
+    const long CAMERA_FEATURE_4K = 1 << 5;
+
+    /**
+     * Values for notifyFeatureCombinationStats type
+     */
+    enum QueryType {
+        QUERY_FEATURE_COMBINATION = 0,
+        QUERY_SESSION_CHARACTERISTICS = 1,
+    }
+
+    @utf8InCpp String mCameraId;
+    int mUid;
+    long mFeatureCombination;
+    int mQueryType;
+    int mStatus;
+}
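
A minimal sketch of how a caller might fill the new parcelable before handing it to ICameraServiceProxy::notifyFeatureCombinationStats (added further below). The generated C++ constant and enum spellings are assumed from standard AIDL codegen, and the concrete values are examples only.

#include <android/hardware/CameraFeatureCombinationStats.h>

#include <cstdint>
#include <string>

using ::android::hardware::CameraFeatureCombinationStats;

// Build a stats record for a feature-combination query. Feature bits are
// OR-ed together into the single mFeatureCombination long.
CameraFeatureCombinationStats makeFeatureQueryStats(const std::string& cameraId, int uid) {
    CameraFeatureCombinationStats stats;
    stats.mCameraId = cameraId;
    stats.mUid = uid;
    stats.mFeatureCombination = CameraFeatureCombinationStats::CAMERA_FEATURE_60_FPS |
                                CameraFeatureCombinationStats::CAMERA_FEATURE_HLG10;
    stats.mQueryType = static_cast<int32_t>(
            CameraFeatureCombinationStats::QueryType::QUERY_FEATURE_COMBINATION);
    stats.mStatus = 0;  // example status; semantics are defined by the reporting site
    return stats;
}
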
diff --git a/camera/aidl/android/hardware/CameraIdRemapping.aidl b/camera/aidl/android/hardware/CameraIdRemapping.aidl
deleted file mode 100644
index 453f696..0000000
--- a/camera/aidl/android/hardware/CameraIdRemapping.aidl
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package android.hardware;
-
-/**
- * Specifies a remapping of Camera Ids.
- *
- * Example: For a given package, a remapping of camera id0 to id1 specifies
- * that any operation to perform on id0 should instead be performed on id1.
- *
- * @hide
- */
-parcelable CameraIdRemapping {
-    /**
-     * Specifies remapping of Camera Ids per package.
-     */
-    parcelable PackageIdRemapping {
-        /** Package Name (e.g. com.android.xyz). */
-        @utf8InCpp String packageName;
-        /**
-         * Ordered list of Camera Ids to replace. Only Camera Ids present in this list will be
-         * affected.
-         */
-        @utf8InCpp List<String> cameraIdsToReplace;
-        /**
-         *  Ordered list of updated Camera Ids, where updatedCameraIds[i] corresponds to
-         *  the updated camera id for cameraIdsToReplace[i].
-         */
-        @utf8InCpp List<String> updatedCameraIds;
-    }
-
-    /**
-     * List of Camera Id remappings to perform.
-     */
-    List<PackageIdRemapping> packageIdRemappings;
-}
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index 885749d..ce6c2d3 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -16,6 +16,7 @@
 
 package android.hardware;
 
+import android.content.AttributionSourceState;
 import android.hardware.ICamera;
 import android.hardware.ICameraClient;
 import android.hardware.camera2.ICameraDeviceUser;
@@ -30,7 +31,6 @@
 import android.hardware.camera2.impl.CameraMetadataNative;
 import android.hardware.ICameraServiceListener;
 import android.hardware.CameraInfo;
-import android.hardware.CameraIdRemapping;
 import android.hardware.CameraStatus;
 import android.hardware.CameraExtensionSessionStats;
 
@@ -67,33 +67,51 @@
      *
      * @param type The type of the camera, can be either CAMERA_TYPE_BACKWARD_COMPATIBLE
      *        or CAMERA_TYPE_ALL.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      */
-    int getNumberOfCameras(int type, int deviceId, int devicePolicy);
+    int getNumberOfCameras(int type, in AttributionSourceState clientAttribution, int devicePolicy);
+
+    /**
+     * If changed, reflect in
+     * frameworks/base/core/java/android/hardware/camera2/CameraManager.java.
+     * We have an enum here since the decision to override to portrait mode / fetch the
+     * rotationOverride as it exists in CameraManager right now is based on a static system
+     * property and not something that changes based dynamically, say on fold state. As a result,
+     * we can't use just a boolean to differentiate between the case where cameraserver should
+     * override to portrait (sensor orientation is 0, 180) or just rotate the sensor feed (sensor
+     * orientation is 90, 270)
+     */
+    const int ROTATION_OVERRIDE_NONE = 0;
+    const int ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT = 1;
+    const int ROTATION_OVERRIDE_ROTATION_ONLY = 2;
 
     /**
      * Fetch basic camera information for a camera.
      *
      * @param cameraId The ID of the camera to fetch information for.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *        correspond to portrait.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      * @return CameraInfo for the camera.
      */
-    CameraInfo getCameraInfo(int cameraId, boolean overrideToPortrait, int deviceId,
-            int devicePolicy);
+    CameraInfo getCameraInfo(int cameraId, int rotationOverride,
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
-     * Default UID/PID values for non-privileged callers of
-     * connect() and connectDevice()
+     * Default UID/PID values for non-privileged callers of connect() and connectDevice(). Can be
+     * used to set the pid/uid fields of AttributionSourceState to indicate the calling uid/pid
+     * should be used.
      */
     const int USE_CALLING_UID = -1;
     const int USE_CALLING_PID = -1;
@@ -102,13 +120,14 @@
      * Open a camera device through the old camera API.
      *
      * @param cameraId The ID of the camera to open.
-     * @param opPackageName The package name to report for the app-ops.
-     * @param clientUid UID for the calling client.
-     * @param clientPid PID for the calling client.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *        correspond to portrait.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
      * @param forceSlowJpegMode Whether to force slow jpeg mode.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -116,12 +135,10 @@
      */
     ICamera connect(ICameraClient client,
             int cameraId,
-            @utf8InCpp String opPackageName,
-            int clientUid, int clientPid,
             int targetSdkVersion,
-            boolean overrideToPortrait,
+            int rotationOverride,
             boolean forceSlowJpegMode,
-            int deviceId,
+            in AttributionSourceState clientAttribution,
             int devicePolicy);
 
     /**
@@ -129,11 +146,13 @@
      * Only supported for device HAL versions >= 3.2.
      *
      * @param cameraId The ID of the camera to open.
-     * @param opPackageName The package name to report for the app-ops.
-     * @param clientUid UID for the calling client.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *        correspond to portrait.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -141,12 +160,10 @@
      */
     ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
             @utf8InCpp String cameraId,
-            @utf8InCpp String opPackageName,
-            @nullable @utf8InCpp String featureId,
-            int clientUid, int oomScoreOffset,
+            int oomScoreOffset,
             int targetSdkVersion,
-            boolean overrideToPortrait,
-            int deviceId,
+            int rotationOverride,
+            in AttributionSourceState clientAttribution,
             int devicePolicy);
 
     /**
@@ -165,35 +182,24 @@
     ConcurrentCameraIdCombination[] getConcurrentCameraIds();
 
     /**
-      * Check whether a particular set of session configurations are concurrently supported by the
-      * corresponding camera ids.
-      *
-      * @param sessions the set of camera id and session configuration pairs to be queried.
-      * @param targetSdkVersion the target sdk level of the application calling this function.
-      * @return true  - the set of concurrent camera id and stream combinations is supported.
-      *         false - the set of concurrent camera id and stream combinations is not supported
-      *                 OR the method was called with a set of camera ids not returned by
-      *                 getConcurrentCameraIds().
-      */
+     * Check whether a particular set of session configurations are concurrently supported by the
+     * corresponding camera ids.
+     *
+     * @param sessions the set of camera id and session configuration pairs to be queried.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param clientAttribution The AttributionSource of the client.
+     * @param devicePolicy The camera policy of the device of the associated context (default
+     *                     policy for default device context). Only virtual cameras would be exposed
+     *                     only for custom policy and only real cameras would be exposed for default
+     *                     policy.
+     * @return true  - the set of concurrent camera id and stream combinations is supported.
+     *         false - the set of concurrent camera id and stream combinations is not supported
+     *                 OR the method was called with a set of camera ids not returned by
+     *                 getConcurrentCameraIds().
+     */
     boolean isConcurrentSessionConfigurationSupported(
             in CameraIdAndSessionConfiguration[] sessions,
-            int targetSdkVersion);
-
-    /**
-     * Remap Camera Ids in the CameraService.
-     *
-     * Once this is in effect, all binder calls in the ICameraService that
-     * use logicalCameraId should consult remapping state to arrive at the
-     * correct cameraId to perform the operation on.
-     *
-     * Note: Before the new cameraIdRemapping state is applied, the previous
-     * state is cleared.
-     *
-     * @param cameraIdRemapping the camera ids to remap. Sending an unpopulated
-     *        cameraIdRemapping object will result in clearing of any previous
-     *        cameraIdRemapping state in the camera service.
-     */
-    void remapCameraIds(in CameraIdRemapping cameraIdRemapping);
+            int targetSdkVersion, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Inject Session Params into an existing camera session.
@@ -217,9 +223,13 @@
      * Only supported for device HAL versions >= 3.2
      *
      * @param cameraId The ID of the camera to fetch metadata for.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *        correspond to portrait.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param targetSdkVersion the target sdk level of the application calling this function.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -227,7 +237,7 @@
      * @return Characteristics for the given camera.
      */
     CameraMetadataNative getCameraCharacteristics(@utf8InCpp String cameraId, int targetSdkVersion,
-            boolean overrideToPortrait, int deviceId, int devicePolicy);
+            int rotationOverride, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
@@ -267,14 +277,14 @@
      * Set the torch mode for a camera device.
      *
      * @param cameraId The ID of the camera to set torch mode for.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      */
     void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder,
-            int deviceId, int devicePolicy);
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Change the brightness level of the flash unit associated with cameraId to strengthLevel.
@@ -282,27 +292,28 @@
      *
      * @param cameraId The ID of the camera.
      * @param strengthLevel The torch strength level to set for the camera.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      */
     void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel,
-            IBinder clientBinder, int deviceId, int devicePolicy);
+            IBinder clientBinder, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Get the brightness level of the flash unit associated with cameraId.
      *
      * @param cameraId The ID of the camera.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      * @return Torch strength level for the camera.
      */
-    int getTorchStrengthLevel(@utf8InCpp String cameraId, int deviceId, int devicePolicy);
+    int getTorchStrengthLevel(@utf8InCpp String cameraId,
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Notify the camera service of a system event.  Should only be called from system_server.
@@ -368,7 +379,7 @@
      *
      * @param cameraId The camera id to create the CaptureRequest for.
      * @param templateId The template id create the CaptureRequest for.
-     * @param deviceId the device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -376,7 +387,7 @@
      * @return Metadata representing the CaptureRequest.
      */
     CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId,
-            int deviceId, int devicePolicy);
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Check whether a particular session configuration with optional session parameters
@@ -385,7 +396,7 @@
      * @param cameraId The camera id to query session configuration for
      * @param targetSdkVersion the target sdk level of the application calling this function.
      * @param sessionConfiguration Specific session configuration to be verified.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -395,7 +406,7 @@
      */
     boolean isSessionConfigurationWithParametersSupported(@utf8InCpp String cameraId,
             int targetSdkVersion, in SessionConfiguration sessionConfiguration,
-            int deviceId, int devicePolicy);
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Get the camera characteristics for a particular session configuration for
@@ -403,11 +414,14 @@
      *
      * @param cameraId ID of the device for which the session characteristics must be fetched.
      * @param targetSdkVersion the target sdk level of the application calling this function.
-     * @param overrideToPortrait Whether to override the sensor orientation information to
-     *                           correspond to portrait.
+     * @param rotationOverride Whether to override the sensor orientation information to
+     *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
+     *        will override the sensor orientation and rotate and crop, while {@link
+     *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
+     *        without changing the sensor orientation.
      * @param sessionConfiguration Session configuration for which the characteristics
      *                             must be fetched.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -416,8 +430,8 @@
      */
     CameraMetadataNative getSessionCharacteristics(@utf8InCpp String cameraId,
             int targetSdkVersion,
-            boolean overrideToPortrait,
+            int rotationOverride,
             in SessionConfiguration sessionConfiguration,
-            int deviceId,
+            in AttributionSourceState clientAttribution,
             int devicePolicy);
 }
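
To make the ROTATION_OVERRIDE_* comment above concrete, here is an illustrative helper that maps a sensor orientation to the appropriate constant. It is a sketch of the reasoning described in the interface comment, not the actual CameraManager logic; the system-property gating is reduced to a boolean parameter.

#include <android/hardware/ICameraService.h>

#include <cstdint>

using ::android::hardware::ICameraService;

// overrideEnabled stands in for the static system property mentioned in the
// interface comment; sensorOrientationDegrees is 0, 90, 180, or 270.
int32_t chooseRotationOverride(bool overrideEnabled, int sensorOrientationDegrees) {
    if (!overrideEnabled) {
        return ICameraService::ROTATION_OVERRIDE_NONE;
    }
    // Landscape-mounted sensors (0/180) need the full override-to-portrait treatment,
    // while portrait-mounted sensors (90/270) only need the feed rotated and cropped.
    return (sensorOrientationDegrees % 180 == 0)
            ? ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT
            : ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY;
}
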
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index dcd69b0..887a68b 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -16,6 +16,7 @@
 
 package android.hardware;
 
+import android.hardware.CameraFeatureCombinationStats;
 import android.hardware.CameraSessionStats;
 import android.hardware.CameraExtensionSessionStats;
 
@@ -38,6 +39,12 @@
     oneway void notifyCameraState(in CameraSessionStats cameraSessionStats);
 
     /**
+     * Notify feature combination query for a camera device.
+     */
+    oneway void notifyFeatureCombinationStats(
+            in CameraFeatureCombinationStats cameraFeatureCombinationStats);
+
+    /**
      * Returns the necessary rotate and crop override for the top activity which
      * will be one of ({@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_NONE},
      * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_90},
diff --git a/camera/camera2/ConcurrentCamera.cpp b/camera/camera2/ConcurrentCamera.cpp
index 67aa876..ac442ed 100644
--- a/camera/camera2/ConcurrentCamera.cpp
+++ b/camera/camera2/ConcurrentCamera.cpp
@@ -32,7 +32,8 @@
 ConcurrentCameraIdCombination::ConcurrentCameraIdCombination() = default;
 
 ConcurrentCameraIdCombination::ConcurrentCameraIdCombination(
-        std::vector<std::string> &&combination) : mConcurrentCameraIds(std::move(combination)) { }
+        std::vector<std::pair<std::string, int32_t>> &&combination)
+            : mConcurrentCameraIdDeviceIdPairs(std::move(combination)) { }
 
 ConcurrentCameraIdCombination::~ConcurrentCameraIdCombination() = default;
 
@@ -42,25 +43,29 @@
         return BAD_VALUE;
     }
     status_t err = OK;
-    mConcurrentCameraIds.clear();
-    int32_t cameraIdCount = 0;
-    if ((err = parcel->readInt32(&cameraIdCount)) != OK) {
-        ALOGE("%s: Failed to read the camera id count from parcel: %d", __FUNCTION__, err);
+    mConcurrentCameraIdDeviceIdPairs.clear();
+    int32_t cameraCount = 0;
+    if ((err = parcel->readInt32(&cameraCount)) != OK) {
+        ALOGE("%s: Failed to read the camera count from parcel: %d", __FUNCTION__, err);
         return err;
     }
-    for (int32_t i = 0; i < cameraIdCount; i++) {
-        String16 id;
-        if ((err = parcel->readString16(&id)) != OK) {
+    for (int32_t i = 0; i < cameraCount; i++) {
+        String16 cameraId;
+        if ((err = parcel->readString16(&cameraId)) != OK) {
             ALOGE("%s: Failed to read camera id!", __FUNCTION__);
             return err;
         }
-        mConcurrentCameraIds.push_back(toStdString(id));
+        int32_t deviceId;
+        if ((err = parcel->readInt32(&deviceId)) != OK) {
+            ALOGE("%s: Failed to read device id!", __FUNCTION__);
+            return err;
+        }
+        mConcurrentCameraIdDeviceIdPairs.push_back({toStdString(cameraId), deviceId});
     }
     return OK;
 }
 
 status_t ConcurrentCameraIdCombination::writeToParcel(android::Parcel* parcel) const {
-
     if (parcel == nullptr) {
         ALOGE("%s: Null parcel", __FUNCTION__);
         return BAD_VALUE;
@@ -68,16 +73,20 @@
 
     status_t err = OK;
 
-    if ((err = parcel->writeInt32(mConcurrentCameraIds.size())) != OK) {
+    if ((err = parcel->writeInt32(mConcurrentCameraIdDeviceIdPairs.size())) != OK) {
         ALOGE("%s: Failed to write the camera id count to parcel: %d", __FUNCTION__, err);
         return err;
     }
 
-    for (const auto &it : mConcurrentCameraIds) {
-        if ((err = parcel->writeString16(toString16(it))) != OK) {
+    for (const auto &it : mConcurrentCameraIdDeviceIdPairs) {
+        if ((err = parcel->writeString16(toString16(it.first))) != OK) {
             ALOGE("%s: Failed to write the camera id string to parcel: %d", __FUNCTION__, err);
             return err;
         }
+        if ((err = parcel->writeInt32(it.second)) != OK) {
+            ALOGE("%s: Failed to write the device id integer to parcel: %d", __FUNCTION__, err);
+            return err;
+        }
     }
     return OK;
 }
@@ -105,7 +114,6 @@
 }
 
 status_t CameraIdAndSessionConfiguration::writeToParcel(android::Parcel* parcel) const {
-
     if (parcel == nullptr) {
         ALOGE("%s: Null parcel", __FUNCTION__);
         return BAD_VALUE;
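
For reference, a small sketch (assuming the usual android::hardware::camera2::utils namespace) of how a consumer iterates the new (cameraId, deviceId) pairs that the parcel code above now reads and writes. The helper name and formatting are hypothetical.

#include <camera/camera2/ConcurrentCamera.h>

#include <string>

using ::android::hardware::camera2::utils::ConcurrentCameraIdCombination;

// Each entry now carries the virtual-device id next to the camera id, mirroring
// the parcel layout above (string16 camera id followed by an int32 device id).
std::string describeCombination(const ConcurrentCameraIdCombination& combo) {
    std::string out;
    for (const auto& [cameraId, deviceId] : combo.mConcurrentCameraIdDeviceIdPairs) {
        out += "(" + cameraId + ", device " + std::to_string(deviceId) + ") ";
    }
    return out;
}
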
diff --git a/camera/camera2/SessionConfiguration.cpp b/camera/camera2/SessionConfiguration.cpp
index 2f1f22d..065d283 100644
--- a/camera/camera2/SessionConfiguration.cpp
+++ b/camera/camera2/SessionConfiguration.cpp
@@ -72,17 +72,15 @@
 
     bool hasSessionParameters = false;
     CameraMetadata settings;
-    if (flags::feature_combination_query()) {
-        if ((err = parcel->readBool(&hasSessionParameters)) != OK) {
-            ALOGE("%s: Failed to read hasSessionParameters flag from parcel", __FUNCTION__);
-            return err;
-        }
+    if ((err = parcel->readBool(&hasSessionParameters)) != OK) {
+        ALOGE("%s: Failed to read hasSessionParameters flag from parcel", __FUNCTION__);
+        return err;
+    }
 
-        if (hasSessionParameters) {
-            if ((err = settings.readFromParcel(parcel)) != OK) {
-                ALOGE("%s: Failed to read metadata flag from parcel", __FUNCTION__);
-                return err;
-            }
+    if (hasSessionParameters) {
+        if ((err = settings.readFromParcel(parcel)) != OK) {
+            ALOGE("%s: Failed to read metadata flag from parcel", __FUNCTION__);
+            return err;
         }
     }
 
@@ -94,10 +92,8 @@
     for (auto& stream : outputStreams) {
         mOutputStreams.push_back(stream);
     }
-    if (flags::feature_combination_query()) {
-        mHasSessionParameters = hasSessionParameters;
-        mSessionParameters = std::move(settings);
-    }
+    mHasSessionParameters = hasSessionParameters;
+    mSessionParameters = std::move(settings);
 
     return err;
 }
@@ -125,14 +121,12 @@
     err = parcel->writeParcelableVector(mOutputStreams);
     if (err != OK) return err;
 
-    if (flags::feature_combination_query()) {
-        err = parcel->writeBool(mHasSessionParameters);
-        if (err != OK) return err;
+    err = parcel->writeBool(mHasSessionParameters);
+    if (err != OK) return err;
 
-        if (mHasSessionParameters) {
-            err = mSessionParameters.writeToParcel(parcel);
-            if (err != OK) return err;
-        }
+    if (mHasSessionParameters) {
+        err = mSessionParameters.writeToParcel(parcel);
+        if (err != OK) return err;
     }
 
     return OK;
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index 4fcceae..ec36335 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -2,70 +2,35 @@
 container: "system"
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_hsum_permission"
-     is_exported: true
-     description: "Camera access by headless system user"
-     bug: "273539631"
+    namespace: "camera_platform"
+    name: "camera_hsum_permission"
+    is_exported: true
+    description: "Camera access by headless system user"
+    bug: "273539631"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "concert_mode"
-     is_exported: true
-     description: "Introduces a new concert mode camera extension type"
-     bug: "297083874"
+    namespace: "camera_platform"
+    name: "concert_mode"
+    is_exported: true
+    description: "Introduces a new concert mode camera extension type"
+    bug: "297083874"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "feature_combination_query"
-     is_exported: true
-     description: "Query feature combination support and session specific characteristics"
-     bug: "309627704"
+    namespace: "camera_platform"
+    name: "feature_combination_query"
+    is_exported: true
+    description: "Query feature combination support and session specific characteristics"
+    bug: "309627704"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "watch_foreground_changes"
-     description: "Request AppOps to notify changes in the foreground status of the client"
-     bug: "290086710"
-}
-
-flag {
-     namespace: "camera_platform"
-     name: "log_ultrawide_usage"
-     description: "Enable measuring how much usage there is for ultrawide-angle cameras"
-     bug: "300515796"
-}
-
-flag {
-     namespace: "camera_platform"
-     name: "camera_manual_flash_strength_control"
-     is_exported: true
-     description: "Flash brightness level control in manual flash mode"
-     bug: "238348881"
-}
-
-flag {
-     namespace: "camera_platform"
-     name: "lazy_aidl_wait_for_service"
-     description: "Use waitForService instead of getService with lazy AIDL HALs"
-     bug: "285546208"
-}
-
-flag {
-     namespace: "camera_platform"
-     name: "log_zoom_override_usage"
-     description: "Enable measuring how much usage there is for zoom settings overrde"
-     bug: "307409002"
-}
-
-flag {
-     namespace: "camera_platform"
-     name: "session_hal_buf_manager"
-     description: "Enable or disable HAL buffer manager as requested by the camera HAL"
-     bug: "311263114"
+    namespace: "camera_platform"
+    name: "camera_manual_flash_strength_control"
+    is_exported: true
+    description: "Flash brightness level control in manual flash mode"
+    bug: "238348881"
 }
 
 flag {
@@ -76,144 +41,126 @@
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_ae_mode_low_light_boost"
-     is_exported: true
-     description: "An AE mode that enables increased brightening in low light scenes"
-     bug: "312803148"
+    namespace: "camera_platform"
+    name: "camera_ae_mode_low_light_boost"
+    is_exported: true
+    description: "An AE mode that enables increased brightening in low light scenes"
+    bug: "312803148"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "multiresolution_imagereader_usage_config"
-     description: "Enable creating MultiResolutionImageReader with usage flag configuration"
-     bug: "301588215"
+    namespace: "camera_platform"
+    name: "multiresolution_imagereader_usage_config"
+    description: "Enable creating MultiResolutionImageReader with usage flag configuration"
+    bug: "301588215"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "use_ro_board_api_level_for_vndk_version"
-     description: "Enable using ro.board.api_level instead of ro.vndk.version to get VNDK version"
-     bug: "312315580"
+    namespace: "camera_platform"
+    name: "camera_extensions_characteristics_get"
+    is_exported: true
+    description: "Enable get extension specific camera characteristics API"
+    bug: "280649914"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_extensions_characteristics_get"
-     is_exported: true
-     description: "Enable get extension specific camera characteristics API"
-     bug: "280649914"
+    namespace: "camera_platform"
+    name: "return_buffers_outside_locks"
+    description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
+    bug: "315526878"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "delay_lazy_hal_instantiation"
-     description: "Only trigger lazy HAL instantiation when the HAL is needed for an operation."
-     bug: "319735068"
+    namespace: "camera_platform"
+    name: "camera_device_setup"
+    is_exported: true
+    description: "Create an intermediate Camera Device class for limited CameraDevice access."
+    bug: "320741775"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "return_buffers_outside_locks"
-     description: "Enable returning graphics buffers to buffer queues without holding the in-flight mutex"
-     bug: "315526878"
+    namespace: "camera_platform"
+    name: "camera_privacy_allowlist"
+    is_exported: true
+    description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
+    bug: "282814430"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_device_setup"
-     is_exported: true
-     description: "Create an intermediate Camera Device class for limited CameraDevice access."
-     bug: "320741775"
+    namespace: "camera_platform"
+    name: "extension_10_bit"
+    is_exported: true
+    description: "Enables 10-bit support in the camera extensions."
+    bug: "316375635"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "camera_privacy_allowlist"
-     is_exported: true
-     description: "Allowlisting to exempt safety-relevant cameras from privacy control for automotive devices"
-     bug: "282814430"
+    namespace: "camera_platform"
+    name: "single_thread_executor_naming"
+    description: "Set the device executor thread name."
+    bug: "359709863"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "surface_ipc"
-     description: "Optimize Surface binder IPC"
-     bug: "323292530"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "analytics_24q3"
+    description: "Miscellaneous camera platform metrics for 24Q3"
+    bug: "332557570"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "extension_10_bit"
-     is_exported: true
-     description: "Enables 10-bit support in the camera extensions."
-     bug: "316375635"
+    namespace: "camera_platform"
+    name: "multi_res_raw_reprocessing"
+    description: "Allow multi-resolution raw reprocessing without reprocessing capability"
+    bug: "336922859"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "single_thread_executor"
-     description: "Ensure device logic is run within one thread."
-     bug: "305857746"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "api1_release_binderlock_before_cameraservice_disconnect"
+    description: "Drop mSerializationLock in Camera1 client when calling into CameraService"
+    bug: "351778072"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "surface_leak_fix"
-     description: "Address Surface release leaks in CaptureRequest"
-     bug: "324071855"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "bump_preview_frame_space_priority"
+    description: "Increase the PreviewFrameSpacer thread priority"
+    bug: "355665306"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "concert_mode_api"
-     description: "Covers the eyes free videography public facing API"
-     bug: "297083874"
-}
-
-
-flag {
-     namespace: "camera_platform"
-     name: "cache_permission_services"
-     description: "Cache IPermissionController and IPermissionChecker in CameraService to reduce query latency."
-     bug: "326139956"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "dumpsys_request_stream_ids"
+    description: "Add stream id information to last request dumpsys"
+    bug: "357913929"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "check_session_support_before_session_char"
-     description: "Validate that a SessionConfiguration is supported before fetching SessionCharacteristics."
-     bug: "327008530"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
+    namespace: "camera_platform"
+    name: "enable_hal_abort_from_cameraservicewatchdog"
+    description: "Enable CameraServiceWatchdog to abort camera HAL to generate HAL tombstones"
+    bug: "349652177"
 }
 
 flag {
-     namespace: "camera_platform"
-     name: "calculate_perf_override_during_session_support"
-     description: "Dynamically calulate whether perf class override should be set in isSessionConfigurationWithParametersSupported."
-     bug: "332975108"
-     metadata {
-       purpose: PURPOSE_BUGFIX
-     }
-}
-
-flag {
-     namespace: "camera_platform"
-     name: "analytics_24q3"
-     description: "Miscellaneous camera platform metrics for 24Q3"
-     bug: "332557570"
+    namespace: "camera_platform"
+    name: "enable_stream_reconfiguration_for_unchanged_streams"
+    description: "Enable stream reconfiguration for unchanged streams"
+    bug: "341740105"
 }
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index dfa53d2..646b139 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -59,7 +59,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status (::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const std::string&, int, int, int, bool, bool, int32_t, int32_t,
+        int, int, int, bool, const AttributionSourceState&, int32_t,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -81,10 +81,9 @@
             // construct a camera client from an existing remote
     static  sp<Camera>  create(const sp<::android::hardware::ICamera>& camera);
     static  sp<Camera>  connect(int cameraId,
-                                const std::string& clientPackageName,
-                                int clientUid, int clientPid, int targetSdkVersion,
-                                bool overrideToPortrait, bool forceSlowJpegMode,
-                                int32_t deviceId = kDefaultDeviceId, int32_t devicePolicy = 0);
+                                int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+                                const AttributionSourceState& clientAttribution,
+                                int32_t devicePolicy = 0);
 
             virtual     ~Camera();
 
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 85ddbd6..d98abe4 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_HARDWARE_CAMERA_BASE_H
 #define ANDROID_HARDWARE_CAMERA_BASE_H
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/ICameraServiceListener.h>
 
 #include <utils/Mutex.h>
@@ -107,6 +108,7 @@
 
 } // namespace hardware
 
+using content::AttributionSourceState;
 using hardware::CameraInfo;
 
 template <typename TCam>
@@ -123,19 +125,19 @@
     typedef typename TCamTraits::TCamConnectService TCamConnectService;
 
     static sp<TCam>      connect(int cameraId,
-                                 const std::string& clientPackageName,
-                                 int clientUid, int clientPid, int targetSdkVersion,
-                                 bool overrideToPortrait, bool forceSlowJpegMode,
-                                 int32_t deviceId, int32_t devicePolicy);
+                                 int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+                                 const AttributionSourceState &clientAttribution,
+                                 int32_t devicePolicy);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
 
-    static int           getNumberOfCameras(int32_t deviceId, int32_t devicePolicy);
+    static int           getNumberOfCameras(const AttributionSourceState& clientAttribution,
+                                            int32_t devicePolicy);
 
     static status_t      getCameraInfo(int cameraId,
-                                       bool overrideToPortrait,
-                                       int32_t deviceId,
+                                       int rotationOverride,
+                                       const AttributionSourceState& clientAttribution,
                                        int32_t devicePolicy,
                                        /*out*/
                                        struct hardware::CameraInfo* cameraInfo);
diff --git a/camera/include/camera/camera2/ConcurrentCamera.h b/camera/include/camera/camera2/ConcurrentCamera.h
index ac99fd5..2a65da8 100644
--- a/camera/include/camera/camera2/ConcurrentCamera.h
+++ b/camera/include/camera/camera2/ConcurrentCamera.h
@@ -28,9 +28,9 @@
 namespace utils {
 
 struct ConcurrentCameraIdCombination : public Parcelable {
-    std::vector<std::string> mConcurrentCameraIds;
+    std::vector<std::pair<std::string, int32_t>> mConcurrentCameraIdDeviceIdPairs;
     ConcurrentCameraIdCombination();
-    ConcurrentCameraIdCombination(std::vector<std::string> &&combination);
+    ConcurrentCameraIdCombination(std::vector<std::pair<std::string, int32_t>> &&combination);
     virtual ~ConcurrentCameraIdCombination();
 
     virtual status_t writeToParcel(android::Parcel *parcel) const override;
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 421469a..379c0b5 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -66,38 +66,41 @@
 cc_library_shared {
     name: "libcamera2ndk",
     srcs: [
+        "NdkCameraCaptureSession.cpp",
+        "NdkCameraDevice.cpp",
         "NdkCameraManager.cpp",
         "NdkCameraMetadata.cpp",
-        "NdkCameraDevice.cpp",
         "NdkCaptureRequest.cpp",
-        "NdkCameraCaptureSession.cpp",
+        "impl/ACameraCaptureSession.cpp",
+        "impl/ACameraDevice.cpp",
         "impl/ACameraManager.cpp",
         "impl/ACameraMetadata.cpp",
-        "impl/ACameraDevice.cpp",
-        "impl/ACameraCaptureSession.cpp",
     ],
     shared_libs: [
-        "libbinder",
-        "liblog",
-        "libgui",
-        "libutils",
+        "android.companion.virtual.virtualdevice_aidl-cpp",
+        "android.companion.virtualdevice.flags-aconfig-cc",
+        "framework-permission-aidl-cpp",
         "libandroid_runtime",
+        "libbinder",
         "libcamera_client",
-        "libstagefright_foundation",
-        "libcutils",
         "libcamera_metadata",
+        "libcutils",
+        "libgui",
+        "liblog",
         "libmediandk",
         "libnativewindow",
+        "libstagefright_foundation",
+        "libutils",
     ],
     header_libs: [
         "jni_headers",
     ],
     cflags: [
-        "-fvisibility=hidden",
         "-DEXPORT=__attribute__((visibility(\"default\")))",
         "-Wall",
-        "-Wextra",
         "-Werror",
+        "-Wextra",
+        "-fvisibility=hidden",
     ],
     // TODO: jchowdhary@, use header_libs instead b/131165718
     include_dirs: [
@@ -115,16 +118,16 @@
     cpp_std: "gnu++17",
     vendor: true,
     srcs: [
+        "NdkCameraCaptureSession.cpp",
+        "NdkCameraDevice.cpp",
+        "NdkCameraManager.cpp",
+        "NdkCameraMetadata.cpp",
+        "NdkCaptureRequest.cpp",
+        "impl/ACameraCaptureSession.cpp",
+        "impl/ACameraMetadata.cpp",
         "ndk_vendor/impl/ACameraDevice.cpp",
         "ndk_vendor/impl/ACameraManager.cpp",
         "ndk_vendor/impl/utils.cpp",
-        "impl/ACameraMetadata.cpp",
-        "impl/ACameraCaptureSession.cpp",
-        "NdkCameraMetadata.cpp",
-        "NdkCameraCaptureSession.cpp",
-        "NdkCameraManager.cpp",
-        "NdkCameraDevice.cpp",
-        "NdkCaptureRequest.cpp",
     ],
 
     export_include_dirs: ["include"],
@@ -133,30 +136,30 @@
     ],
     local_include_dirs: [
         ".",
-        "include",
         "impl",
+        "include",
     ],
     cflags: [
-        "-fvisibility=hidden",
         "-DEXPORT=__attribute__((visibility(\"default\")))",
         "-D__ANDROID_VNDK__",
+        "-fvisibility=hidden",
     ],
 
     shared_libs: [
-        "libbinder_ndk",
-        "libfmq",
-        "libhidlbase",
-        "libhardware",
-        "libnativewindow",
-        "liblog",
-        "libutils",
-        "libstagefright_foundation",
-        "libcutils",
-        "libcamera_metadata",
-        "libmediandk",
         "android.frameworks.cameraservice.common-V1-ndk",
         "android.frameworks.cameraservice.device-V2-ndk",
         "android.frameworks.cameraservice.service-V2-ndk",
+        "libbinder_ndk",
+        "libcamera_metadata",
+        "libcutils",
+        "libfmq",
+        "libhardware",
+        "libhidlbase",
+        "liblog",
+        "libmediandk",
+        "libnativewindow",
+        "libstagefright_foundation",
+        "libutils",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
@@ -173,19 +176,19 @@
     name: "ACameraNdkVendorTest",
     vendor: true,
     srcs: [
-        "ndk_vendor/tests/AImageReaderVendorTest.cpp",
         "ndk_vendor/tests/ACameraManagerTest.cpp",
+        "ndk_vendor/tests/AImageReaderVendorTest.cpp",
     ],
     shared_libs: [
         "libcamera2ndk_vendor",
         "libcamera_metadata",
+        "libcutils",
         "libhidlbase",
+        "liblog",
         "libmediandk",
         "libnativewindow",
-        "libutils",
         "libui",
-        "libcutils",
-        "liblog",
+        "libutils",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
diff --git a/camera/ndk/NdkCameraManager.cpp b/camera/ndk/NdkCameraManager.cpp
index 2de4a50..1b3343e 100644
--- a/camera/ndk/NdkCameraManager.cpp
+++ b/camera/ndk/NdkCameraManager.cpp
@@ -68,7 +68,7 @@
 
 EXPORT
 camera_status_t ACameraManager_registerAvailabilityCallback(
-        ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -81,13 +81,13 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance()->registerAvailabilityCallback(callback);
+    manager->registerAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
 EXPORT
 camera_status_t ACameraManager_unregisterAvailabilityCallback(
-        ACameraManager*, const ACameraManager_AvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -100,13 +100,13 @@
                callback->onCameraAvailable, callback->onCameraUnavailable);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance()->unregisterAvailabilityCallback(callback);
+    manager->unregisterAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
 EXPORT
 camera_status_t ACameraManager_registerExtendedAvailabilityCallback(
-        ACameraManager* /*manager*/, const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -131,13 +131,13 @@
             return ACAMERA_ERROR_INVALID_PARAMETER;
         }
     }
-    CameraManagerGlobal::getInstance()->registerExtendedAvailabilityCallback(callback);
+    manager->registerExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
 
 EXPORT
 camera_status_t ACameraManager_unregisterExtendedAvailabilityCallback(
-        ACameraManager* /*manager*/, const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+        ACameraManager* manager, const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
     ATRACE_CALL();
     if (callback == nullptr) {
         ALOGE("%s: invalid argument! callback is null!", __FUNCTION__);
@@ -154,7 +154,7 @@
                callback->onCameraAccessPrioritiesChanged);
         return ACAMERA_ERROR_INVALID_PARAMETER;
     }
-    CameraManagerGlobal::getInstance()->unregisterExtendedAvailabilityCallback(callback);
+    manager->unregisterExtendedAvailabilityCallback(callback);
     return ACAMERA_OK;
 }
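
Since these entry points now route through the ACameraManager instance (and therefore its per-manager device context) rather than the CameraManagerGlobal singleton, a minimal NDK-side usage sketch follows. It only uses the public NdkCameraManager.h API and is an illustration, not part of the patch.

#include <camera/NdkCameraManager.h>
#include <android/log.h>

static void onAvailable(void* /*context*/, const char* cameraId) {
    __android_log_print(ANDROID_LOG_INFO, "CamAvail", "camera %s available", cameraId);
}

static void onUnavailable(void* /*context*/, const char* cameraId) {
    __android_log_print(ANDROID_LOG_INFO, "CamAvail", "camera %s unavailable", cameraId);
}

void watchCameraAvailability() {
    ACameraManager* manager = ACameraManager_create();
    ACameraManager_AvailabilityCallbacks callbacks = {
            /*context*/ nullptr, onAvailable, onUnavailable};
    // With this change the registration is scoped to the manager's device context,
    // so a caller on a virtual device only hears about cameras it can access.
    ACameraManager_registerAvailabilityCallback(manager, &callbacks);
    // ... use the cameras ...
    ACameraManager_unregisterAvailabilityCallback(manager, &callbacks);
    ACameraManager_delete(manager);
}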
 
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 0744992..6d29ef5 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -17,23 +17,108 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ACameraManager"
 
-#include <memory>
 #include "ACameraManager.h"
-#include "ACameraMetadata.h"
-#include "ACameraDevice.h"
-#include <utils/Vector.h>
-#include <cutils/properties.h>
-#include <stdlib.h>
+#include <android_companion_virtualdevice_flags.h>
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 #include <camera/VendorTagDescriptor.h>
+#include <cutils/properties.h>
+#include <stdlib.h>
+#include <utils/Vector.h>
+#include <memory>
+#include "ACameraDevice.h"
+#include "ACameraMetadata.h"
 
 using namespace android::acam;
+namespace vd_flags = android::companion::virtualdevice::flags;
 
 namespace android {
 namespace acam {
+namespace {
 
-// TODO(b/291736219): Add device-awareness to ACameraManager.
+using ::android::binder::Status;
+using ::android::companion::virtualnative::IVirtualDeviceManagerNative;
+
+// Return binder connection to VirtualDeviceManager.
+//
+// Subsequent calls return the same cached instance.
+sp<IVirtualDeviceManagerNative> getVirtualDeviceManager() {
+    auto connectToVirtualDeviceManagerNative = []() {
+        sp<IBinder> binder =
+                defaultServiceManager()->checkService(String16("virtualdevice_native"));
+        if (binder == nullptr) {
+            ALOGW("%s: Cannot get virtualdevice_native service", __func__);
+            return interface_cast<IVirtualDeviceManagerNative>(nullptr);
+        }
+        return interface_cast<IVirtualDeviceManagerNative>(binder);
+    };
+
+    static sp<IVirtualDeviceManagerNative> vdm = connectToVirtualDeviceManagerNative();
+    return vdm;
+}
+
+// Returns the device id that the calling process is running on.
+// Returns the default device id if the process cannot be attributed to a single virtual device.
+int getCurrentDeviceId() {
+    if (!vd_flags::camera_device_awareness()) {
+        return kDefaultDeviceId;
+    }
+
+    auto vdm = getVirtualDeviceManager();
+    if (vdm == nullptr) {
+        return kDefaultDeviceId;
+    }
+
+    const uid_t myUid = getuid();
+    std::vector<int> deviceIds;
+    Status status = vdm->getDeviceIdsForUid(myUid, &deviceIds);
+    if (!status.isOk() || deviceIds.empty()) {
+        ALOGE("%s: Failed to call getDeviceIdsForUid to determine device id for uid %d: %s",
+              __func__, myUid, status.toString8().c_str());
+        return kDefaultDeviceId;
+    }
+
+    // If the UID is associated with multiple virtual devices, use the default device's
+    // camera as we cannot disambiguate here. This effectively means that the app has
+    // activities on different devices at the same time.
+    if (deviceIds.size() != 1) {
+        return kDefaultDeviceId;
+    }
+    return deviceIds[0];
+}
+
+// Returns device policy for POLICY_TYPE_CAMERA corresponding to deviceId.
+DevicePolicy getDevicePolicyForDeviceId(const int deviceId) {
+    if (!vd_flags::camera_device_awareness() || deviceId == kDefaultDeviceId) {
+        return DevicePolicy::DEVICE_POLICY_DEFAULT;
+    }
+
+    auto vdm = getVirtualDeviceManager();
+    if (vdm == nullptr) {
+        return DevicePolicy::DEVICE_POLICY_DEFAULT;
+    }
+
+    int policy = IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT;
+    Status status = vdm->getDevicePolicy(deviceId, IVirtualDeviceManagerNative::POLICY_TYPE_CAMERA,
+                                         &policy);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed to call getDevicePolicy to determine camera policy for device id %d: %s",
+              __func__, deviceId, status.toString8().c_str());
+        return DevicePolicy::DEVICE_POLICY_DEFAULT;
+    }
+    return static_cast<DevicePolicy>(policy);
+}
+
+// Returns true if camera owned by device cameraDeviceId can be accessed within deviceContext.
+bool isCameraAccessible(const DeviceContext& deviceContext, const int cameraDeviceId) {
+    if (!vd_flags::camera_device_awareness() ||
+        deviceContext.policy == DevicePolicy::DEVICE_POLICY_DEFAULT) {
+        return cameraDeviceId == kDefaultDeviceId;
+    }
+    return deviceContext.deviceId == cameraDeviceId;
+}
+
+}  // namespace
 
 // Static member definitions
 const char* CameraManagerGlobal::kCameraIdKey   = "CameraId";
@@ -44,6 +129,11 @@
 Mutex                CameraManagerGlobal::sLock;
 wp<CameraManagerGlobal> CameraManagerGlobal::sInstance = nullptr;
 
+DeviceContext::DeviceContext() {
+    deviceId = getCurrentDeviceId();
+    policy = getDevicePolicyForDeviceId(deviceId);
+}
+
 sp<CameraManagerGlobal> CameraManagerGlobal::getInstance() {
     Mutex::Autolock _l(sLock);
     sp<CameraManagerGlobal> instance = sInstance.promote();
@@ -87,14 +177,11 @@
 
         sp<IServiceManager> sm = defaultServiceManager();
         sp<IBinder> binder;
-        do {
-            binder = sm->getService(toString16(kCameraServiceName));
-            if (binder != nullptr) {
-                break;
-            }
-            ALOGW("CameraService not published, waiting...");
-            usleep(kCameraServicePollDelay);
-        } while(true);
+        binder = sm->checkService(String16(kCameraServiceName));
+        if (binder == nullptr) {
+            ALOGE("%s: Could not get CameraService instance.", __FUNCTION__);
+            return nullptr;
+        }
         if (mDeathNotifier == nullptr) {
             mDeathNotifier = new DeathNotifier(this);
         }
@@ -128,17 +215,11 @@
         std::vector<hardware::CameraStatus> cameraStatuses{};
         mCameraService->addListener(mCameraServiceListener, &cameraStatuses);
         for (auto& c : cameraStatuses) {
-            // Skip callback for cameras not belonging to the default device, as NDK doesn't support
-            // device awareness yet.
-            if (c.deviceId != kDefaultDeviceId) {
-                continue;
-            }
-
-            onStatusChangedLocked(c.status, c.cameraId);
+            onStatusChangedLocked(c.status, c.deviceId, c.cameraId);
 
             for (auto& unavailablePhysicalId : c.unavailablePhysicalIds) {
                 onStatusChangedLocked(hardware::ICameraServiceListener::STATUS_NOT_PRESENT,
-                        c.cameraId, unavailablePhysicalId);
+                                      c.deviceId, c.cameraId, unavailablePhysicalId);
             }
         }
 
@@ -198,14 +279,15 @@
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
         AutoMutex lock(cm->mLock);
-        std::vector<std::string> cameraIdList;
+        std::vector<DeviceStatusMapKey> keysToRemove;
+        keysToRemove.reserve(cm->mDeviceStatusMap.size());
         for (auto& pair : cm->mDeviceStatusMap) {
-            cameraIdList.push_back(pair.first);
+            keysToRemove.push_back(pair.first);
         }
 
-        for (const std::string& cameraId : cameraIdList) {
-            cm->onStatusChangedLocked(
-                    CameraServiceListener::STATUS_NOT_PRESENT, cameraId);
+        for (const DeviceStatusMapKey& key : keysToRemove) {
+            cm->onStatusChangedLocked(CameraServiceListener::STATUS_NOT_PRESENT, key.deviceId,
+                                      key.cameraId);
         }
         cm->mCameraService.clear();
         // TODO: consider adding re-connect call here?
@@ -213,32 +295,35 @@
 }
 
 void CameraManagerGlobal::registerExtendedAvailabilityCallback(
-        const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
-    return registerAvailCallback<ACameraManager_ExtendedAvailabilityCallbacks>(callback);
+        const DeviceContext& deviceContext,
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    return registerAvailCallback<ACameraManager_ExtendedAvailabilityCallbacks>(deviceContext,
+                                                                               callback);
 }
 
 void CameraManagerGlobal::unregisterExtendedAvailabilityCallback(
-        const ACameraManager_ExtendedAvailabilityCallbacks *callback) {
+        const DeviceContext& deviceContext,
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
     Mutex::Autolock _l(mLock);
 
     drainPendingCallbacksLocked();
 
-    Callback cb(callback);
+    Callback cb(deviceContext, callback);
     mCallbacks.erase(cb);
 }
 
 void CameraManagerGlobal::registerAvailabilityCallback(
-        const ACameraManager_AvailabilityCallbacks *callback) {
-    return registerAvailCallback<ACameraManager_AvailabilityCallbacks>(callback);
+        const DeviceContext& deviceContext, const ACameraManager_AvailabilityCallbacks* callback) {
+    return registerAvailCallback<ACameraManager_AvailabilityCallbacks>(deviceContext, callback);
 }
 
 void CameraManagerGlobal::unregisterAvailabilityCallback(
-        const ACameraManager_AvailabilityCallbacks *callback) {
+        const DeviceContext& deviceContext, const ACameraManager_AvailabilityCallbacks* callback) {
     Mutex::Autolock _l(mLock);
 
     drainPendingCallbacksLocked();
 
-    Callback cb(callback);
+    Callback cb(deviceContext, callback);
     mCallbacks.erase(cb);
 }
 
@@ -261,20 +346,24 @@
     }
 }
 
-template<class T>
-void CameraManagerGlobal::registerAvailCallback(const T *callback) {
+template <class T>
+void CameraManagerGlobal::registerAvailCallback(const DeviceContext& deviceContext,
+                                                const T* callback) {
     Mutex::Autolock _l(mLock);
     getCameraServiceLocked();
-    Callback cb(callback);
-    auto pair = mCallbacks.insert(cb);
+    Callback cb(deviceContext, callback);
+    const auto& [_, newlyRegistered] = mCallbacks.insert(cb);
     // Send initial callbacks if callback is newly registered
-    if (pair.second) {
-        for (auto& pair : mDeviceStatusMap) {
-            const std::string& cameraId = pair.first;
-            int32_t status = pair.second.getStatus();
+    if (newlyRegistered) {
+        for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+            if (!isCameraAccessible(deviceContext, key.deviceId)) {
+                continue;
+            }
+            const std::string& cameraId = key.cameraId;
+            int32_t status = statusAndHAL3Support.getStatus();
             // Don't send initial callbacks for camera ids which don't support
             // camera2
-            if (!pair.second.supportsHAL3) {
+            if (!statusAndHAL3Support.supportsHAL3) {
                 continue;
             }
 
@@ -290,7 +379,7 @@
 
             // Physical camera unavailable callback
             std::set<std::string> unavailablePhysicalCameras =
-                    pair.second.getUnavailablePhysicalIds();
+                    statusAndHAL3Support.getUnavailablePhysicalIds();
             for (const auto& physicalCameraId : unavailablePhysicalCameras) {
                 sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
                 ACameraManager_PhysicalCameraAvailabilityCallback cbFunc =
@@ -320,21 +409,26 @@
     return camera2Support;
 }
 
-void CameraManagerGlobal::getCameraIdList(std::vector<std::string>* cameraIds) {
+void CameraManagerGlobal::getCameraIdList(const DeviceContext& context,
+        std::vector<std::string>* cameraIds) {
     // Ensure that we have initialized/refreshed the list of available devices
     Mutex::Autolock _l(mLock);
     // Needed to make sure we're connected to cameraservice
     getCameraServiceLocked();
-    for(auto& deviceStatus : mDeviceStatusMap) {
-        int32_t status = deviceStatus.second.getStatus();
+    for (auto& [key, statusAndHAL3Support] : mDeviceStatusMap) {
+        if (!isCameraAccessible(context, key.deviceId)) {
+            continue;
+        }
+
+        int32_t status = statusAndHAL3Support.getStatus();
         if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT ||
                 status == hardware::ICameraServiceListener::STATUS_ENUMERATING) {
             continue;
         }
-        if (!deviceStatus.second.supportsHAL3) {
+        if (!statusAndHAL3Support.supportsHAL3) {
             continue;
         }
-        cameraIds->push_back(deviceStatus.first);
+        cameraIds->push_back(key.cameraId);
     }
 }
 
@@ -471,36 +565,24 @@
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onStatusChanged(
         int32_t status, const std::string& cameraId, int deviceId) {
-    // Skip callback for cameras not belonging to the default device, as NDK doesn't support
-    // device awareness yet.
-    if (deviceId != kDefaultDeviceId) {
-        return binder::Status::ok();
-    }
-
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
-        cm->onStatusChanged(status, cameraId);
-    } else {
-        ALOGE("Cannot deliver status change. Global camera manager died");
+        cm->onStatusChanged(status, deviceId, cameraId);
     }
+    ALOGE_IF(cm == nullptr,
+             "Cannot deliver physical camera status change. Global camera manager died");
     return binder::Status::ok();
 }
 
 binder::Status CameraManagerGlobal::CameraServiceListener::onPhysicalCameraStatusChanged(
         int32_t status, const std::string& cameraId, const std::string& physicalCameraId,
         int deviceId) {
-    // Skip callback for cameras not belonging to the default device, as NDK doesn't support
-    // device awareness yet.
-    if (deviceId != kDefaultDeviceId) {
-        return binder::Status::ok();
-    }
-
     sp<CameraManagerGlobal> cm = mCameraManager.promote();
     if (cm != nullptr) {
-        cm->onStatusChanged(status, cameraId, physicalCameraId);
-    } else {
-        ALOGE("Cannot deliver physical camera status change. Global camera manager died");
+        cm->onStatusChanged(status, deviceId, cameraId, physicalCameraId);
     }
+    ALOGE_IF(cm == nullptr,
+             "Cannot deliver physical camera status change. Global camera manager died");
     return binder::Status::ok();
 }
 
@@ -518,23 +600,24 @@
     }
 }
 
-void CameraManagerGlobal::onStatusChanged(
-        int32_t status, const std::string& cameraId) {
+void CameraManagerGlobal::onStatusChanged(int32_t status, const int deviceId,
+        const std::string& cameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(status, cameraId);
+    onStatusChangedLocked(status, deviceId, cameraId);
 }
 
-void CameraManagerGlobal::onStatusChangedLocked(
-        int32_t status, const std::string& cameraId) {
+void CameraManagerGlobal::onStatusChangedLocked(int32_t status, const int deviceId,
+        const std::string& cameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
     }
 
-    bool firstStatus = (mDeviceStatusMap.count(cameraId) == 0);
-    int32_t oldStatus = firstStatus ?
-            status : // first status
-            mDeviceStatusMap[cameraId].getStatus();
+    DeviceStatusMapKey key{.deviceId = deviceId, .cameraId = cameraId};
+
+    bool firstStatus = (mDeviceStatusMap.count(key) == 0);
+    int32_t oldStatus = firstStatus ? status :  // first status
+                                mDeviceStatusMap[key].getStatus();
 
     if (!firstStatus &&
             isStatusAvailable(status) == isStatusAvailable(oldStatus)) {
@@ -544,15 +627,17 @@
 
     bool supportsHAL3 = supportsCamera2ApiLocked(cameraId);
     if (firstStatus) {
-        mDeviceStatusMap.emplace(std::piecewise_construct,
-                std::forward_as_tuple(cameraId),
-                std::forward_as_tuple(status, supportsHAL3));
+        mDeviceStatusMap.emplace(std::piecewise_construct, std::forward_as_tuple(key),
+                                 std::forward_as_tuple(status, supportsHAL3));
     } else {
-        mDeviceStatusMap[cameraId].updateStatus(status);
+        mDeviceStatusMap[key].updateStatus(status);
     }
     // Iterate through all registered callbacks
     if (supportsHAL3) {
         for (auto cb : mCallbacks) {
+            if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
+                continue;
+            }
             sp<AMessage> msg = new AMessage(kWhatSendSingleCallback, mHandler);
             ACameraManager_AvailabilityCallback cbFp = isStatusAvailable(status) ?
                     cb.mAvailable : cb.mUnavailable;
@@ -564,30 +649,31 @@
         }
     }
     if (status == hardware::ICameraServiceListener::STATUS_NOT_PRESENT) {
-        mDeviceStatusMap.erase(cameraId);
+        mDeviceStatusMap.erase(key);
     }
 }
 
-void CameraManagerGlobal::onStatusChanged(
-        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+void CameraManagerGlobal::onStatusChanged(int32_t status, const int deviceId,
+        const std::string& cameraId, const std::string& physicalCameraId) {
     Mutex::Autolock _l(mLock);
-    onStatusChangedLocked(status, cameraId, physicalCameraId);
+    onStatusChangedLocked(status, deviceId, cameraId, physicalCameraId);
 }
 
-void CameraManagerGlobal::onStatusChangedLocked(
-        int32_t status, const std::string& cameraId, const std::string& physicalCameraId) {
+void CameraManagerGlobal::onStatusChangedLocked(int32_t status, const int deviceId,
+        const std::string& cameraId, const std::string& physicalCameraId) {
     if (!validStatus(status)) {
         ALOGE("%s: Invalid status %d", __FUNCTION__, status);
         return;
     }
 
-    auto logicalStatus = mDeviceStatusMap.find(cameraId);
+    DeviceStatusMapKey key{.deviceId = deviceId, .cameraId = cameraId};
+    auto logicalStatus = mDeviceStatusMap.find(key);
     if (logicalStatus == mDeviceStatusMap.end()) {
         ALOGE("%s: Physical camera id %s status change on a non-present id %s",
                 __FUNCTION__, physicalCameraId.c_str(), cameraId.c_str());
         return;
     }
-    int32_t logicalCamStatus = mDeviceStatusMap[cameraId].getStatus();
+    int32_t logicalCamStatus = mDeviceStatusMap[key].getStatus();
     if (logicalCamStatus != hardware::ICameraServiceListener::STATUS_PRESENT &&
             logicalCamStatus != hardware::ICameraServiceListener::STATUS_NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
@@ -599,14 +685,17 @@
 
     bool updated = false;
     if (status == hardware::ICameraServiceListener::STATUS_PRESENT) {
-        updated = mDeviceStatusMap[cameraId].removeUnavailablePhysicalId(physicalCameraId);
+        updated = mDeviceStatusMap[key].removeUnavailablePhysicalId(physicalCameraId);
     } else {
-        updated = mDeviceStatusMap[cameraId].addUnavailablePhysicalId(physicalCameraId);
+        updated = mDeviceStatusMap[key].addUnavailablePhysicalId(physicalCameraId);
     }
 
     // Iterate through all registered callbacks
     if (supportsHAL3 && updated) {
         for (auto cb : mCallbacks) {
+            if (!isCameraAccessible(cb.mDeviceContext, deviceId)) {
+                continue;
+            }
             sp<AMessage> msg = new AMessage(kWhatSendSinglePhysicalCameraCallback, mHandler);
             ACameraManager_PhysicalCameraAvailabilityCallback cbFp = isStatusAvailable(status) ?
                     cb.mPhysicalCamAvailable : cb.mPhysicalCamUnavailable;
@@ -660,7 +749,7 @@
     Mutex::Autolock _l(mLock);
 
     std::vector<std::string> idList;
-    CameraManagerGlobal::getInstance()->getCameraIdList(&idList);
+    mGlobalManager->getCameraIdList(mDeviceContext, &idList);
 
     int numCameras = idList.size();
     ACameraIdList *out = new ACameraIdList;
@@ -710,7 +799,7 @@
         const char* cameraIdStr, sp<ACameraMetadata>* characteristics) {
     Mutex::Autolock _l(mLock);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
+    sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         return ACAMERA_ERROR_CAMERA_DISCONNECTED;
@@ -718,8 +807,15 @@
 
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
+
+    AttributionSourceState clientAttribution;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+    clientAttribution.deviceId = mDeviceContext.deviceId;
+
     binder::Status serviceRet = cs->getCameraCharacteristics(cameraIdStr,
-            targetSdkVersion, /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+            targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+            clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
             &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -757,7 +853,7 @@
 
     ACameraDevice* device = new ACameraDevice(cameraId, callback, chars);
 
-    sp<hardware::ICameraService> cs = CameraManagerGlobal::getInstance()->getCameraService();
+    sp<hardware::ICameraService> cs = mGlobalManager->getCameraService();
     if (cs == nullptr) {
         ALOGE("%s: Cannot reach camera service!", __FUNCTION__);
         delete device;
@@ -767,12 +863,20 @@
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = device->getServiceCallback();
     sp<hardware::camera2::ICameraDeviceUser> deviceRemote;
     int targetSdkVersion = android_get_application_target_sdk_version();
+
+    AttributionSourceState clientAttribution;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+    clientAttribution.deviceId = mDeviceContext.deviceId;
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
+
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
     binder::Status serviceRet = cs->connectDevice(
-            callbacks, cameraId, "", {},
-            hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
-            targetSdkVersion, /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+            callbacks, cameraId, /*oomScoreOffset*/0,
+            targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+            clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
             /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
@@ -820,6 +924,22 @@
     return ACAMERA_OK;
 }
 
-ACameraManager::~ACameraManager() {
+void ACameraManager::registerAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->registerAvailabilityCallback(mDeviceContext, callback);
+}
 
+void ACameraManager::unregisterAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterAvailabilityCallback(mDeviceContext, callback);
+}
+
+void ACameraManager::registerExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->registerExtendedAvailabilityCallback(mDeviceContext, callback);
+}
+
+void ACameraManager::unregisterExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterExtendedAvailabilityCallback(mDeviceContext, callback);
 }
diff --git a/camera/ndk/impl/ACameraManager.h b/camera/ndk/impl/ACameraManager.h
index c6e2bf9..f4124ef 100644
--- a/camera/ndk/impl/ACameraManager.h
+++ b/camera/ndk/impl/ACameraManager.h
@@ -20,6 +20,7 @@
 #include <camera/NdkCameraManager.h>
 
 #include <android-base/parseint.h>
+#include <android/companion/virtualnative/IVirtualDeviceManagerNative.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <camera/CameraMetadata.h>
@@ -37,6 +38,36 @@
 namespace android {
 namespace acam {
 
+enum class DevicePolicy {
+  DEVICE_POLICY_DEFAULT =
+    ::android::companion::virtualnative::IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT,
+  DEVICE_POLICY_CUSTOM =
+    ::android::companion::virtualnative::IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM
+};
+
+/**
+ * Device context within which cameras are accessed.
+ *
+ * When constructed, the device id is set to the id of the virtual device corresponding to the
+ * caller's UID (or the default device id if the current process is not on a virtual device).
+ *
+ * See getDeviceId() in Context.java for more context (no pun intended).
+ */
+struct DeviceContext {
+    DeviceContext();
+
+    // Id of the virtual device associated with this context (or DEFAULT_DEVICE_ID = 0 in case
+    // the caller UID is not running on a virtual device).
+    int deviceId;
+    // Device policy corresponding to VirtualDeviceParams.POLICY_TYPE_CAMERA:
+    //
+    // Can be either:
+    // * (0) DEVICE_POLICY_DEFAULT - virtual devices have access to default device cameras.
+    // * (1) DEVICE_POLICY_CUSTOM - virtual devices do not have access to default device cameras
+    //                              and can only access virtual cameras owned by the same device.
+    DevicePolicy policy;
+};
+
 /**
  * Per-process singleton instance of CameraManger. Shared by all ACameraManager
  * instances. Created when first ACameraManager is created and destroyed when
@@ -49,29 +80,30 @@
     static sp<CameraManagerGlobal> getInstance();
     sp<hardware::ICameraService> getCameraService();
 
-    void registerAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
-    void unregisterAvailabilityCallback(
-            const ACameraManager_AvailabilityCallbacks *callback);
+    void registerAvailabilityCallback(const DeviceContext& context,
+                                      const ACameraManager_AvailabilityCallbacks* callback);
+    void unregisterAvailabilityCallback(const DeviceContext& context,
+                                        const ACameraManager_AvailabilityCallbacks* callback);
 
     void registerExtendedAvailabilityCallback(
+            const DeviceContext& context,
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
     void unregisterExtendedAvailabilityCallback(
+            const DeviceContext& context,
             const ACameraManager_ExtendedAvailabilityCallbacks* callback);
 
     /**
      * Return camera IDs that support camera2
      */
-    void getCameraIdList(std::vector<std::string> *cameraIds);
+    void getCameraIdList(const DeviceContext& deviceContext, std::vector<std::string>* cameraIds);
 
   private:
     sp<hardware::ICameraService> mCameraService;
-    const int                    kCameraServicePollDelay = 500000; // 0.5s
     const char*                  kCameraServiceName      = "media.camera";
     Mutex                        mLock;
 
-    template<class T>
-    void registerAvailCallback(const T *callback);
+    template <class T>
+    void registerAvailCallback(const DeviceContext& deviceContext, const T* callback);
 
     class DeathNotifier : public IBinder::DeathRecipient {
       public:
@@ -115,29 +147,34 @@
 
     // Wrapper of ACameraManager_AvailabilityCallbacks so we can store it in std::set
     struct Callback {
-        explicit Callback(const ACameraManager_AvailabilityCallbacks *callback) :
-            mAvailable(callback->onCameraAvailable),
-            mUnavailable(callback->onCameraUnavailable),
-            mAccessPriorityChanged(nullptr),
-            mPhysicalCamAvailable(nullptr),
-            mPhysicalCamUnavailable(nullptr),
-            mContext(callback->context) {}
+        explicit Callback(const DeviceContext& deviceContext,
+                 const ACameraManager_AvailabilityCallbacks* callback)
+            : mDeviceContext(deviceContext),
+              mAvailable(callback->onCameraAvailable),
+              mUnavailable(callback->onCameraUnavailable),
+              mAccessPriorityChanged(nullptr),
+              mPhysicalCamAvailable(nullptr),
+              mPhysicalCamUnavailable(nullptr),
+              mContext(callback->context) {}
 
-        explicit Callback(const ACameraManager_ExtendedAvailabilityCallbacks *callback) :
-            mAvailable(callback->availabilityCallbacks.onCameraAvailable),
-            mUnavailable(callback->availabilityCallbacks.onCameraUnavailable),
-            mAccessPriorityChanged(callback->onCameraAccessPrioritiesChanged),
-            mPhysicalCamAvailable(callback->onPhysicalCameraAvailable),
-            mPhysicalCamUnavailable(callback->onPhysicalCameraUnavailable),
-            mContext(callback->availabilityCallbacks.context) {}
+        explicit Callback(const DeviceContext& deviceContext,
+                 const ACameraManager_ExtendedAvailabilityCallbacks* callback)
+            : mDeviceContext(deviceContext),
+              mAvailable(callback->availabilityCallbacks.onCameraAvailable),
+              mUnavailable(callback->availabilityCallbacks.onCameraUnavailable),
+              mAccessPriorityChanged(callback->onCameraAccessPrioritiesChanged),
+              mPhysicalCamAvailable(callback->onPhysicalCameraAvailable),
+              mPhysicalCamUnavailable(callback->onPhysicalCameraUnavailable),
+              mContext(callback->availabilityCallbacks.context) {}
 
         bool operator == (const Callback& other) const {
-            return (mAvailable == other.mAvailable &&
-                    mUnavailable == other.mUnavailable &&
+            return (mAvailable == other.mAvailable && mUnavailable == other.mUnavailable &&
                     mAccessPriorityChanged == other.mAccessPriorityChanged &&
                     mPhysicalCamAvailable == other.mPhysicalCamAvailable &&
                     mPhysicalCamUnavailable == other.mPhysicalCamUnavailable &&
-                    mContext == other.mContext);
+                    mContext == other.mContext &&
+                    mDeviceContext.deviceId == other.mDeviceContext.deviceId &&
+                    mDeviceContext.policy == other.mDeviceContext.policy);
         }
         bool operator != (const Callback& other) const {
             return !(*this == other);
@@ -146,6 +183,9 @@
 #pragma GCC diagnostic push
 #pragma GCC diagnostic ignored "-Wordered-compare-function-pointers"
             if (*this == other) return false;
+            if (mDeviceContext.deviceId != other.mDeviceContext.deviceId) {
+                return mDeviceContext.deviceId < other.mDeviceContext.deviceId;
+            }
             if (mContext != other.mContext) return mContext < other.mContext;
             if (mPhysicalCamAvailable != other.mPhysicalCamAvailable) {
                 return mPhysicalCamAvailable < other.mPhysicalCamAvailable;
@@ -163,6 +203,7 @@
         bool operator > (const Callback& other) const {
             return (*this != other && !(*this < other));
         }
+        DeviceContext mDeviceContext;
         ACameraManager_AvailabilityCallback mAvailable;
         ACameraManager_AvailabilityCallback mUnavailable;
         ACameraManager_AccessPrioritiesChangedCallback mAccessPriorityChanged;
@@ -204,37 +245,17 @@
 
     sp<hardware::ICameraService> getCameraServiceLocked();
     void onCameraAccessPrioritiesChanged();
-    void onStatusChanged(int32_t status, const std::string& cameraId);
-    void onStatusChangedLocked(int32_t status, const std::string& cameraId);
-    void onStatusChanged(int32_t status, const std::string& cameraId, const std::string& physicalCameraId);
-    void onStatusChangedLocked(int32_t status, const std::string& cameraId,
-           const std::string& physicalCameraId);
+    void onStatusChanged(int32_t status, int deviceId, const std::string& cameraId);
+    void onStatusChangedLocked(int32_t status, int deviceId, const std::string& cameraId);
+    void onStatusChanged(int32_t status, int deviceId, const std::string& cameraId,
+                         const std::string& physicalCameraId);
+    void onStatusChangedLocked(int32_t status, int deviceId, const std::string& cameraId,
+                               const std::string& physicalCameraId);
     // Utils for status
     static bool validStatus(int32_t status);
     static bool isStatusAvailable(int32_t status);
     bool supportsCamera2ApiLocked(const std::string &cameraId);
 
-    // The sort logic must match the logic in
-    // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
-    struct CameraIdComparator {
-        bool operator()(const std::string& a, const std::string& b) const {
-            uint32_t aUint = 0, bUint = 0;
-            bool aIsUint = base::ParseUint(a.c_str(), &aUint);
-            bool bIsUint = base::ParseUint(b.c_str(), &bUint);
-
-            // Uint device IDs first
-            if (aIsUint && bIsUint) {
-                return aUint < bUint;
-            } else if (aIsUint) {
-                return true;
-            } else if (bIsUint) {
-                return false;
-            }
-            // Simple string compare if both id are not uint
-            return a < b;
-        }
-    };
-
     struct StatusAndHAL3Support {
       private:
         int32_t status = hardware::ICameraServiceListener::STATUS_NOT_PRESENT;
@@ -253,13 +274,40 @@
         std::set<std::string> getUnavailablePhysicalIds();
     };
 
-    // Map camera_id -> status
-    std::map<std::string, StatusAndHAL3Support, CameraIdComparator> mDeviceStatusMap;
+    struct DeviceStatusMapKey {
+        int deviceId;
+        std::string cameraId;
+
+        bool operator<(const DeviceStatusMapKey& other) const {
+            if (deviceId != other.deviceId) {
+                return deviceId < other.deviceId;
+            }
+
+            // The sort logic must match the logic in
+            // libcameraservice/common/CameraProviderManager.cpp::getAPI1CompatibleCameraDeviceIds
+            uint32_t cameraIdUint = 0, otherCameraIdUint = 0;
+            bool cameraIdIsUint = base::ParseUint(cameraId.c_str(), &cameraIdUint);
+            bool otherCameraIdIsUint = base::ParseUint(other.cameraId.c_str(), &otherCameraIdUint);
+
+            // Uint device IDs first
+            if (cameraIdIsUint && otherCameraIdIsUint) {
+                return cameraIdUint < otherCameraIdUint;
+            } else if (cameraIdIsUint) {
+                return true;
+            } else if (otherCameraIdIsUint) {
+                return false;
+            }
+            // Simple string compare if both id are not uint
+            return cameraId < other.cameraId;
+        }
+    };
+
+    std::map<DeviceStatusMapKey, StatusAndHAL3Support> mDeviceStatusMap;
 
     // For the singleton instance
     static Mutex sLock;
     static wp<CameraManagerGlobal> sInstance;
-    CameraManagerGlobal() {};
+    CameraManagerGlobal() {}
     ~CameraManagerGlobal();
 };
 
@@ -271,9 +319,7 @@
  * Leave outside of android namespace because it's NDK struct
  */
 struct ACameraManager {
-    ACameraManager() :
-            mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
-    ~ACameraManager();
+    ACameraManager() : mGlobalManager(android::acam::CameraManagerGlobal::getInstance()) {}
     camera_status_t getCameraIdList(ACameraIdList** cameraIdList);
     static void     deleteCameraIdList(ACameraIdList* cameraIdList);
 
@@ -282,6 +328,12 @@
     camera_status_t openCamera(const char* cameraId,
                                ACameraDevice_StateCallbacks* callback,
                                /*out*/ACameraDevice** device);
+    void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void registerExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+    void unregisterExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
 
   private:
     enum {
@@ -289,6 +341,7 @@
     };
     android::Mutex         mLock;
     android::sp<android::acam::CameraManagerGlobal> mGlobalManager;
+    const android::acam::DeviceContext mDeviceContext;
 };
 
 #endif //_ACAMERA_MANAGER_H
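
To make the new DeviceStatusMapKey ordering concrete (including the plain string-compare fallback restored above), here is a small standalone sketch. It deliberately uses std::strtoul instead of base::ParseUint so it compiles outside the tree; it only illustrates the intended ordering and is not the AOSP code.

#include <cstdio>
#include <cstdlib>
#include <map>
#include <string>

// Keys are grouped by deviceId; within a device, numeric camera ids sort
// numerically and come before non-numeric ids, which sort lexicographically.
struct Key {
    int deviceId;
    std::string cameraId;

    bool operator<(const Key& other) const {
        if (deviceId != other.deviceId) return deviceId < other.deviceId;
        char* endA = nullptr;
        char* endB = nullptr;
        unsigned long a = std::strtoul(cameraId.c_str(), &endA, 10);
        unsigned long b = std::strtoul(other.cameraId.c_str(), &endB, 10);
        bool aIsUint = !cameraId.empty() && *endA == '\0';
        bool bIsUint = !other.cameraId.empty() && *endB == '\0';
        if (aIsUint && bIsUint) return a < b;
        if (aIsUint != bIsUint) return aIsUint;  // numeric ids first
        return cameraId < other.cameraId;        // both non-numeric
    }
};

int main() {
    std::map<Key, int> statuses;
    statuses[{0, "10"}] = 0;
    statuses[{0, "2"}] = 0;
    statuses[{0, "rear-wide"}] = 0;
    statuses[{1, "0"}] = 0;
    for (const auto& entry : statuses) {
        std::printf("device %d camera %s\n", entry.first.deviceId, entry.first.cameraId.c_str());
    }
    // Prints cameras 2, 10, rear-wide for device 0, then camera 0 for device 1.
    return 0;
}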
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index cf6b970..1400121 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -576,9 +576,7 @@
  *
  * @param session the capture session of interest
  *
- * @return <ul><li>
- *             {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
- *             if it is not NULL.</li>
+ * @return <ul><li>{@link ACAMERA_OK} if the method succeeds.</li>
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
  *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
  *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
@@ -617,9 +615,7 @@
  *
  * @param session the capture session of interest
  *
- * @return <ul><li>
- *             {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
- *             if it is not NULL.</li>
+ * @return <ul><li>{@link ACAMERA_OK} if the method succeeds.</li>
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
  *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
  *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index c97059d..1817490 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -91,7 +91,6 @@
     ACAMERA_AUTOMOTIVE_LENS,
     ACAMERA_EXTENSION,
     ACAMERA_JPEGR,
-    ACAMERA_EFV,
     ACAMERA_SECTION_COUNT,
 
     ACAMERA_VENDOR = 0x8000
@@ -139,7 +138,6 @@
     ACAMERA_AUTOMOTIVE_LENS_START  = ACAMERA_AUTOMOTIVE_LENS   << 16,
     ACAMERA_EXTENSION_START        = ACAMERA_EXTENSION         << 16,
     ACAMERA_JPEGR_START            = ACAMERA_JPEGR             << 16,
-    ACAMERA_EFV_START              = ACAMERA_EFV               << 16,
     ACAMERA_VENDOR_START           = ACAMERA_VENDOR            << 16
 } acamera_metadata_section_start_t;
 
@@ -603,7 +601,7 @@
      * ACAMERA_SENSOR_FRAME_DURATION.</p>
      * <p>Note that the actual achievable max framerate also depends on the minimum frame
      * duration of the output streams. The max frame rate will be
-     * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations)</code>. For example,
+     * <code>min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations))</code>. For example,
      * if the application sets this key to <code>{60, 60}</code>, but the maximum minFrameDuration among
      * all configured streams is 33ms, the maximum framerate won't be 60fps, but will be
      * 30fps.</p>
@@ -646,9 +644,14 @@
      * be made, and for firing pre-capture flash pulses to estimate
      * scene brightness and required final capture flash power, when
      * the flash is enabled.</p>
-     * <p>Normally, this entry should be set to START for only a
-     * single request, and the application should wait until the
-     * sequence completes before starting a new one.</p>
+     * <p>Flash is enabled during the precapture sequence when:</p>
+     * <ul>
+     * <li>AE mode is ON_ALWAYS_FLASH</li>
+     * <li>AE mode is ON_AUTO_FLASH and the scene is deemed too dark without flash, or</li>
+     * <li>AE mode is ON and flash mode is TORCH or SINGLE</li>
+     * </ul>
+     * <p>Normally, this entry should be set to START for only a single request, and the
+     * application should wait until the sequence completes before starting a new one.</p>
      * <p>When a precapture metering sequence is finished, the camera device
      * may lock the auto-exposure routine internally to be able to accurately expose the
      * subsequent still capture image (<code>ACAMERA_CONTROL_CAPTURE_INTENT == STILL_CAPTURE</code>).
@@ -2289,8 +2292,7 @@
      * boost when the light level threshold is exceeded.</p>
      * <p>This state indicates when low light boost is 'ACTIVE' and applied. Similarly, it can
      * indicate when it is not being applied by returning 'INACTIVE'.</p>
-     * <p>This key will be absent from the CaptureResult if AE mode is not set to
-     * 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY.</p>
+     * <p>The default value will always be 'INACTIVE'.</p>
      */
     ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE =                     // byte (acamera_metadata_enum_android_control_low_light_boost_state_t)
             ACAMERA_CONTROL_START + 59,
@@ -2426,35 +2428,46 @@
      * </ul></p>
      *
      * <p>Flash strength level to use in capture mode i.e. when the applications control
-     * flash with either SINGLE or TORCH mode.</p>
-     * <p>Use android.flash.info.singleStrengthMaxLevel and
-     * android.flash.info.torchStrengthMaxLevel to check whether the device supports
+     * flash with either <code>SINGLE</code> or <code>TORCH</code> mode.</p>
+     * <p>Use ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL to check whether the device supports
      * flash strength control or not.
-     * If the values of android.flash.info.singleStrengthMaxLevel and
-     * android.flash.info.torchStrengthMaxLevel are greater than 1,
+     * If the values of ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL are greater than 1,
      * then the device supports manual flash strength control.</p>
-     * <p>If the ACAMERA_FLASH_MODE <code>==</code> TORCH the value must be &gt;= 1
-     * and &lt;= android.flash.info.torchStrengthMaxLevel.
+     * <p>If the ACAMERA_FLASH_MODE <code>==</code> <code>TORCH</code> the value must be &gt;= 1
+     * and &lt;= ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL.
      * If the application doesn't set the key and
-     * android.flash.info.torchStrengthMaxLevel &gt; 1,
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL &gt; 1,
      * then the flash will be fired at the default level set by HAL in
-     * android.flash.info.torchStrengthDefaultLevel.
-     * If the ACAMERA_FLASH_MODE <code>==</code> SINGLE, then the value must be &gt;= 1
-     * and &lt;= android.flash.info.singleStrengthMaxLevel.
+     * ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL.
+     * If the ACAMERA_FLASH_MODE <code>==</code> <code>SINGLE</code>, then the value must be &gt;= 1
+     * and &lt;= ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL.
      * If the application does not set this key and
-     * android.flash.info.singleStrengthMaxLevel &gt; 1,
+     * ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL &gt; 1,
      * then the flash will be fired at the default level set by HAL
-     * in android.flash.info.singleStrengthDefaultLevel.
-     * If ACAMERA_CONTROL_AE_MODE is set to any of ON_AUTO_FLASH, ON_ALWAYS_FLASH,
-     * ON_AUTO_FLASH_REDEYE, ON_EXTERNAL_FLASH values, then the strengthLevel will be ignored.</p>
+     * in ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL.
+     * If ACAMERA_CONTROL_AE_MODE is set to any of <code>ON_AUTO_FLASH</code>, <code>ON_ALWAYS_FLASH</code>,
+     * <code>ON_AUTO_FLASH_REDEYE</code>, <code>ON_EXTERNAL_FLASH</code> values, then the strengthLevel will be ignored.</p>
+     * <p>When AE mode is ON and flash mode is TORCH or SINGLE, the application should make sure
+     * the AE mode, flash mode, and flash strength level remain the same between precapture
+     * trigger request and final capture request. The flash strength level being set during
+     * precapture sequence is used by the camera device as a reference. The actual strength
+     * may be less, and the auto-exposure routine makes sure proper conversions of sensor
+     * exposure time and sensitivities between precapture and final capture for the specified
+     * strength level.</p>
      *
      * @see ACAMERA_CONTROL_AE_MODE
      * @see ACAMERA_FLASH_MODE
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_STRENGTH_LEVEL =                              // int32
             ACAMERA_FLASH_START + 6,
     /**
-     * <p>Maximum flash brightness level for manual flash control in SINGLE mode.</p>
+     * <p>Maximum flash brightness level for manual flash control in <code>SINGLE</code> mode.</p>
      *
      * <p>Type: int32</p>
      *
@@ -2464,7 +2477,7 @@
      * </ul></p>
      *
      * <p>Maximum flash brightness level in camera capture mode and
-     * ACAMERA_FLASH_MODE set to SINGLE.
+     * ACAMERA_FLASH_MODE set to <code>SINGLE</code>.
      * Value will be &gt; 1 if the manual flash strength control feature is supported,
      * otherwise the value will be equal to 1.
      * Note that this level is just a number of supported levels (the granularity of control).
@@ -2475,7 +2488,7 @@
     ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL =                   // int32
             ACAMERA_FLASH_START + 7,
     /**
-     * <p>Default flash brightness level for manual flash control in SINGLE mode.</p>
+     * <p>Default flash brightness level for manual flash control in <code>SINGLE</code> mode.</p>
      *
      * <p>Type: int32</p>
      *
@@ -2485,14 +2498,16 @@
      * </ul></p>
      *
      * <p>If flash unit is available this will be greater than or equal to 1 and less
-     * or equal to <code>android.flash.info.singleStrengthMaxLevel</code>.
+     * or equal to ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL.
      * Note for devices that do not support the manual flash strength control
      * feature, this level will always be equal to 1.</p>
+     *
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL =               // int32
             ACAMERA_FLASH_START + 8,
     /**
-     * <p>Maximum flash brightness level for manual flash control in TORCH mode</p>
+     * <p>Maximum flash brightness level for manual flash control in <code>TORCH</code> mode</p>
      *
      * <p>Type: int32</p>
      *
@@ -2502,22 +2517,24 @@
      * </ul></p>
      *
      * <p>Maximum flash brightness level in camera capture mode and
-     * ACAMERA_FLASH_MODE set to TORCH.
+     * ACAMERA_FLASH_MODE set to <code>TORCH</code>.
      * Value will be &gt; 1 if the manual flash strength control feature is supported,
      * otherwise the value will be equal to 1.</p>
      * <p>Note that this level is just a number of supported levels(the granularity of control).
      * There is no actual physical power units tied to this level.
-     * There is no relation between android.flash.info.torchStrengthMaxLevel and
-     * android.flash.info.singleStrengthMaxLevel i.e. the ratio of
-     * android.flash.info.torchStrengthMaxLevel:android.flash.info.singleStrengthMaxLevel
+     * There is no relation between ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL and
+     * ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL i.e. the ratio of
+     * ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL:ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
      * is not guaranteed to be the ratio of actual brightness.</p>
      *
      * @see ACAMERA_FLASH_MODE
+     * @see ACAMERA_FLASH_SINGLE_STRENGTH_MAX_LEVEL
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL =                    // int32
             ACAMERA_FLASH_START + 9,
     /**
-     * <p>Default flash brightness level for manual flash control in TORCH mode</p>
+     * <p>Default flash brightness level for manual flash control in <code>TORCH</code> mode</p>
      *
      * <p>Type: int32</p>
      *
@@ -2527,9 +2544,11 @@
      * </ul></p>
      *
      * <p>If flash unit is available this will be greater than or equal to 1 and less
-     * or equal to android.flash.info.torchStrengthMaxLevel.
+     * or equal to ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL.
      * Note for the devices that do not support the manual flash strength control feature,
      * this level will always be equal to 1.</p>
+     *
+     * @see ACAMERA_FLASH_TORCH_STRENGTH_MAX_LEVEL
      */
     ACAMERA_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL =                // int32
             ACAMERA_FLASH_START + 10,
@@ -5863,10 +5882,16 @@
      *   <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
      * </ul></p>
      *
-     * <p>If TRUE, all images produced by the camera device in the RAW image formats will
-     * have lens shading correction already applied to it. If FALSE, the images will
-     * not be adjusted for lens shading correction.
-     * See android.request.maxNumOutputRaw for a list of RAW image formats.</p>
+     * <p>If <code>true</code>, all images produced by the camera device in the <code>RAW</code> image formats will have
+     * at least some lens shading correction already applied to them. If <code>false</code>, the images will
+     * not be adjusted for lens shading correction. See android.request.maxNumOutputRaw for a
+     * list of RAW image formats.</p>
+     * <p>When <code>true</code>, the <code>lensShadingCorrectionMap</code> key may still have values greater than 1.0,
+     * and those will need to be applied to any captured RAW frames for them to match the shading
+     * correction of processed buffers such as <code>YUV</code> or <code>JPEG</code> images. This may occur, for
+     * example, when some basic fixed lens shading correction is applied by hardware to RAW data,
+     * and additional correction is done dynamically in the camera processing pipeline after
+     * demosaicing.</p>
      * <p>This key will be <code>null</code> for all devices do not report this information.
      * Devices with RAW capability will always report this information in this key.</p>
      */
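A client-side sketch of how the key documented above is typically read from the static characteristics, assuming the entry in question is ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED (its enumerator name falls outside this hunk); the helper name is a placeholder, the accessor calls are the existing NDK metadata API.

    // Sketch: query whether RAW frames already carry (at least partial) lens
    // shading correction, per the key documented above.
    #include <camera/NdkCameraManager.h>
    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    static bool isRawLensShadingApplied(ACameraManager* mgr, const char* cameraId) {
        ACameraMetadata* chars = nullptr;
        if (ACameraManager_getCameraCharacteristics(mgr, cameraId, &chars) != ACAMERA_OK) {
            return false;  // treat an unreadable key as "not applied"
        }
        ACameraMetadata_const_entry entry = {};
        bool applied = false;
        if (ACameraMetadata_getConstEntry(chars, ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED,
                                          &entry) == ACAMERA_OK && entry.count == 1) {
            applied = (entry.data.u8[0] == ACAMERA_SENSOR_INFO_LENS_SHADING_APPLIED_TRUE);
        }
        ACameraMetadata_free(chars);
        return applied;
    }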
@@ -8292,9 +8317,9 @@
      * FPS.</p>
      * <p>If the session configuration is not supported, the AE mode reported in the
      * CaptureResult will be 'ON' instead of 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'.</p>
-     * <p>The application can observe the CapturerResult field
-     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE to determine when low light boost is 'ACTIVE' or
-     * 'INACTIVE'.</p>
+     * <p>When this AE mode is enabled, the CaptureResult field
+     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE will indicate when low light boost is 'ACTIVE'
+     * or 'INACTIVE'. By default ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE will be 'INACTIVE'.</p>
      * <p>The low light boost is 'ACTIVE' once the scene lighting condition is less than the
      * upper bound lux value defined by ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE.
      * This mode will be 'INACTIVE' once the scene lighting condition is greater than the
@@ -11546,7 +11571,6 @@
 
 
 
-
 __END_DECLS
 
 #endif /* _NDK_CAMERA_METADATA_TAGS_H */
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
index 099786b..cdba8ff 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.cpp
@@ -574,7 +574,7 @@
 void CameraManagerGlobal::onStatusChangedLocked(
         const CameraDeviceStatus &status, const std::string &cameraId) {
     if (!validStatus(status)) {
-        ALOGE("%s: Invalid status %d", __FUNCTION__, status);
+        ALOGE("%s: Invalid status %d", __FUNCTION__, static_cast<int>(status));
         return;
     }
 
@@ -629,7 +629,7 @@
         const CameraDeviceStatus &status, const std::string& cameraId,
         const std::string& physicalCameraId) {
     if (!validStatus(status)) {
-        ALOGE("%s: Invalid status %d", __FUNCTION__, status);
+        ALOGE("%s: Invalid status %d", __FUNCTION__, static_cast<int>(status));
         return;
     }
 
@@ -643,7 +643,8 @@
     if (logicalCamStatus != CameraDeviceStatus::STATUS_PRESENT &&
             logicalCamStatus != CameraDeviceStatus::STATUS_NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
-                __FUNCTION__, physicalCameraId.c_str(), status, logicalCamStatus);
+              __FUNCTION__, physicalCameraId.c_str(), static_cast<int>(status),
+              static_cast<int>(logicalCamStatus));
         return;
     }
 
@@ -866,6 +867,25 @@
     return status == OK ? ACAMERA_OK : ACAMERA_ERROR_METADATA_NOT_FOUND;
 }
 
-ACameraManager::~ACameraManager() {
+void ACameraManager::registerAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->registerAvailabilityCallback(callback);
+}
 
+void ACameraManager::unregisterAvailabilityCallback(
+        const ACameraManager_AvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterAvailabilityCallback(callback);
+}
+
+void ACameraManager::registerExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->registerExtendedAvailabilityCallback(callback);
+}
+
+void ACameraManager::unregisterExtendedAvailabilityCallback(
+        const ACameraManager_ExtendedAvailabilityCallbacks* callback) {
+    mGlobalManager->unregisterExtendedAvailabilityCallback(callback);
+}
+
+ACameraManager::~ACameraManager() {
 }
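The forwarding methods added here back the public availability-callback entry points; a minimal client-side sketch of the corresponding NDK usage, with stub callback bodies, not part of this change.

    // Sketch: client-side registration that ultimately reaches the forwarding
    // methods added above (callback bodies are stubs).
    #include <camera/NdkCameraManager.h>

    static void onCameraAvailable(void* /*context*/, const char* /*cameraId*/) {}
    static void onCameraUnavailable(void* /*context*/, const char* /*cameraId*/) {}

    static void watchCameraAvailability(ACameraManager* mgr) {
        static ACameraManager_AvailabilityCallbacks cbs = {
                /*context=*/nullptr, onCameraAvailable, onCameraUnavailable};
        ACameraManager_registerAvailabilityCallback(mgr, &cbs);
        // ... and, when no longer interested:
        ACameraManager_unregisterAvailabilityCallback(mgr, &cbs);
    }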
diff --git a/camera/ndk/ndk_vendor/impl/ACameraManager.h b/camera/ndk/ndk_vendor/impl/ACameraManager.h
index 85acee7..2d8eefa 100644
--- a/camera/ndk/ndk_vendor/impl/ACameraManager.h
+++ b/camera/ndk/ndk_vendor/impl/ACameraManager.h
@@ -265,6 +265,12 @@
                                ACameraDevice_StateCallbacks* callback,
                                /*out*/ACameraDevice** device);
     camera_status_t getTagFromName(const char *cameraId, const char *name, uint32_t *tag);
+    void registerAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void unregisterAvailabilityCallback(const ACameraManager_AvailabilityCallbacks* callback);
+    void registerExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
+    void unregisterExtendedAvailabilityCallback(
+            const ACameraManager_ExtendedAvailabilityCallbacks* callback);
 
   private:
     enum {
diff --git a/camera/tests/Android.bp b/camera/tests/Android.bp
index 9aaac6a..484335a 100644
--- a/camera/tests/Android.bp
+++ b/camera/tests/Android.bp
@@ -29,6 +29,7 @@
         "CameraCharacteristicsPermission.cpp",
     ],
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "liblog",
         "libutils",
         "libcutils",
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index e5f99be..5135b5d 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -33,6 +33,7 @@
 #include <hardware/gralloc.h>
 
 #include <camera/CameraMetadata.h>
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/ICameraServiceListener.h>
 #include <android/hardware/BnCameraServiceListener.h>
@@ -46,6 +47,7 @@
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
@@ -347,7 +349,11 @@
     binder::Status res;
 
     int32_t numCameras = 0;
-    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId,
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.packageName = "meeeeeeeee!";
+    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution,
             /*devicePolicy*/0, &numCameras);
     EXPECT_TRUE(res.isOk()) << res;
     EXPECT_LE(0, numCameras);
@@ -360,7 +366,7 @@
 
     EXPECT_EQ(numCameras, static_cast<const int>(statuses.size()));
     for (const auto &it : statuses) {
-        listener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
+        listener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
     }
 
     for (int32_t i = 0; i < numCameras; i++) {
@@ -379,7 +385,7 @@
         CameraMetadata metadata;
         res = service->getCameraCharacteristics(cameraId,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+                clientAttribution, /*devicePolicy*/0, &metadata);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(metadata.isEmpty());
 
@@ -393,10 +399,10 @@
         // Check connect binder calls
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        res = service->connectDevice(callbacks, cameraId, "meeeeeeeee!",
-                {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+        res = service->connectDevice(callbacks, cameraId,
+                /*oomScoreOffset*/ 0,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, /*out*/&device);
+                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -406,12 +412,12 @@
         if (torchStatus == hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF) {
             // Check torch calls
             res = service->setTorchMode(cameraId,
-                    /*enabled*/true, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
+                    /*enabled*/true, callbacks, clientAttribution, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON, i));
             res = service->setTorchMode(cameraId,
-                    /*enabled*/false, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
+                    /*enabled*/false, callbacks, clientAttribution, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF, i));
@@ -437,10 +443,14 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         {
             SCOPED_TRACE("openNewDevice");
-            binder::Status res = service->connectDevice(callbacks, deviceId, "meeeeeeeee!",
-                    {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+            AttributionSourceState clientAttribution;
+            clientAttribution.deviceId = kDefaultDeviceId;
+            clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+            clientAttribution.packageName = "meeeeeeeee!";
+            binder::Status res = service->connectDevice(callbacks, deviceId,
+                    /*oomScoreOffset*/ 0,
                     /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                    /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+                    /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0,
                     /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
@@ -473,11 +483,13 @@
         serviceListener = new TestCameraServiceListener();
         std::vector<hardware::CameraStatus> statuses;
         service->addListener(serviceListener, &statuses);
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         for (const auto &it : statuses) {
-            serviceListener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
+            serviceListener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
         }
         service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
-                kDefaultDeviceId, /*devicePolicy*/0, &numCameras);
+                clientAttribution, /*devicePolicy*/0, &numCameras);
     }
 
     virtual void TearDown() {
@@ -507,6 +519,23 @@
 
         // Setup a buffer queue; I'm just using the vendor opaque format here as that is
         // guaranteed to be present
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/ 2, /*controlledByApp*/ true);
+        EXPECT_TRUE(opaqueConsumer.get() != nullptr);
+        opaqueConsumer->setName(String8("nom nom nom"));
+
+        // Set to VGA dimens for default, as that is guaranteed to be present
+        EXPECT_EQ(OK, opaqueConsumer->setDefaultBufferSize(640, 480));
+        EXPECT_EQ(OK,
+                  opaqueConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED));
+
+        sp<Surface> surface = opaqueConsumer->getSurface();
+
+        sp<IGraphicBufferProducer> producer = surface->getIGraphicBufferProducer();
+        std::string noPhysicalId;
+        OutputConfiguration output(producer, /*rotation*/ 0, noPhysicalId);
+#else
         sp<IGraphicBufferProducer> gbProducer;
         sp<IGraphicBufferConsumer> gbConsumer;
         BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
@@ -523,6 +552,7 @@
 
         std::string noPhysicalId;
         OutputConfiguration output(gbProducer, /*rotation*/0, noPhysicalId);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         // Can we configure?
         res = device->beginConfigure();
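The recurring pattern in the updated tests above is to bundle device id, uid/pid, and package name into a single AttributionSourceState. A small helper sketch capturing that pattern (the helper itself is not part of this change; the field values mirror the test code):

    // Sketch: build the AttributionSourceState that the updated ICameraService
    // calls take in place of separate packageName/uid/pid arguments.
    #include <string>
    #include <android/content/AttributionSourceState.h>
    #include <android/hardware/ICameraService.h>

    static android::content::AttributionSourceState makeClientAttribution(
            int32_t deviceId, const std::string& packageName) {
        android::content::AttributionSourceState attribution;
        attribution.deviceId = deviceId;
        attribution.uid = android::hardware::ICameraService::USE_CALLING_UID;
        attribution.pid = android::hardware::ICameraService::USE_CALLING_PID;
        attribution.packageName = packageName;
        return attribution;
    }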
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 10f7f22..9204eb1 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -19,6 +19,7 @@
 
 #include <gtest/gtest.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/ProcessState.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -47,8 +48,10 @@
     sp<IServiceManager> sm = defaultServiceManager();
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+            hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
             &numCameras);
     EXPECT_TRUE(rc.isOk());
 }
@@ -73,9 +76,11 @@
 
         CameraMetadata metadata;
         std::vector<int32_t> tagsNeedingPermission;
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, &metadata);
         ASSERT_TRUE(rc.isOk());
         EXPECT_FALSE(metadata.isEmpty());
         EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 56fcfa4..2740d09 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -19,6 +19,7 @@
 
 #include <gtest/gtest.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/ProcessState.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -84,8 +85,10 @@
     sp<IServiceManager> sm = defaultServiceManager();
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+            hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
             &numCameras);
     EXPECT_TRUE(rc.isOk());
 
@@ -183,9 +186,11 @@
         }
 
         CameraMetadata metadata;
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+                clientAttribution, /*devicePolicy*/0, &metadata);
         if (!rc.isOk()) {
             // The test is relevant only for cameras with Hal 3.x
             // support.
@@ -209,11 +214,12 @@
             continue;
         }
 
+        clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+        clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+        clientAttribution.packageName = "ZSLTest";
         rc = mCameraService->connect(this, cameraId,
-                "ZSLTest", hardware::ICameraService::USE_CALLING_UID,
-                hardware::ICameraService::USE_CALLING_PID,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, kDefaultDeviceId,
+                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, clientAttribution,
                 /*devicePolicy*/0, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index bd97c39..3b6413c 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -31,6 +31,7 @@
     ],
     shared_libs: [
         "camera_platform_flags_c_lib",
+        "framework-permission-aidl-cpp",
         "libbase",
         "libcutils",
         "libutils",
diff --git a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
index 12b5bc3..c00f2ba 100644
--- a/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_c2ConcurrentCamera_fuzzer.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <camera2/ConcurrentCamera.h>
+#include <CameraUtils.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include "camera2common.h"
 
@@ -33,7 +34,8 @@
         size_t concurrentCameraIdSize = fdp.ConsumeIntegralInRange<size_t>(kRangeMin, kRangeMax);
         for (size_t idx = 0; idx < concurrentCameraIdSize; ++idx) {
             string concurrentCameraId = fdp.ConsumeRandomLengthString();
-            camIdCombination.mConcurrentCameraIds.push_back(concurrentCameraId);
+            camIdCombination.mConcurrentCameraIdDeviceIdPairs.push_back(
+                    {concurrentCameraId, kDefaultDeviceId});
         }
     }
 
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index 0812096..f46d246 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -17,6 +17,7 @@
 #include <Camera.h>
 #include <CameraParameters.h>
 #include <CameraUtils.h>
+#include <android/content/AttributionSourceState.h>
 #include <binder/MemoryDealer.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <gui/Surface.h>
@@ -89,6 +90,7 @@
     bool initCamera();
     void invokeCamera();
     void invokeSetParameters();
+    native_handle_t* createNativeHandle();
     sp<Camera> mCamera = nullptr;
     FuzzedDataProvider* mFDP = nullptr;
 
@@ -103,6 +105,18 @@
     };
 };
 
+native_handle_t* CameraFuzzer::createNativeHandle() {
+    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kMinElements, kMaxElements);
+    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
+    native_handle_t* handle = native_handle_create(numFds, numInts);
+    for (int32_t i = 0; i < numFds; ++i) {
+        std::string filename = mFDP->ConsumeRandomLengthString(kMaxBytes);
+        int32_t fd = open(filename.c_str(), O_RDWR | O_CREAT | O_TRUNC, 0600);  // mode required with O_CREAT
+        handle->data[i] = fd;
+    }
+    return handle;
+}
+
 bool CameraFuzzer::initCamera() {
     ProcessState::self()->startThreadPool();
     sp<IServiceManager> sm = defaultServiceManager();
@@ -110,21 +124,24 @@
     sp<ICameraService> cameraService = nullptr;
     cameraService = interface_cast<ICameraService>(binder);
     sp<ICamera> cameraDevice = nullptr;
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     if (mFDP->ConsumeBool()) {
-        cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */, "CAMERAFUZZ",
-                               hardware::ICameraService::USE_CALLING_UID,
-                               hardware::ICameraService::USE_CALLING_PID,
+        clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+        clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+        clientAttribution.packageName = "CAMERAFUZZ";
+        cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
                                /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                                /*overrideToPortrait*/ false, /*forceSlowJpegMode*/ false,
-                               kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
+                               clientAttribution, /*devicePolicy*/0, &cameraDevice);
     } else {
+        clientAttribution.uid = mFDP->ConsumeIntegral<int8_t>();
+        clientAttribution.pid = mFDP->ConsumeIntegral<int8_t>();
+        clientAttribution.packageName = mFDP->ConsumeRandomLengthString(kMaxBytes).c_str();
         cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
-                               mFDP->ConsumeRandomLengthString(kMaxBytes).c_str(),
-                               mFDP->ConsumeIntegral<int8_t>() /* clientUid */,
-                               mFDP->ConsumeIntegral<int8_t>() /* clientPid */,
                                /*targetSdkVersion*/ mFDP->ConsumeIntegral<int32_t>(),
                                /*overrideToPortrait*/ mFDP->ConsumeBool(),
-                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), kDefaultDeviceId,
+                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), clientAttribution,
                                /*devicePolicy*/0, &cameraDevice);
     }
 
@@ -152,13 +169,15 @@
     }
 
     int32_t cameraId = mFDP->ConsumeIntegral<int32_t>();
-    Camera::getNumberOfCameras(kDefaultDeviceId, /*devicePolicy*/0);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    Camera::getNumberOfCameras(clientAttribution, /*devicePolicy*/0);
     CameraInfo cameraInfo;
     cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
                                             : mFDP->ConsumeIntegral<int32_t>();
     cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
                                                  : mFDP->ConsumeIntegral<int32_t>();
-    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, kDefaultDeviceId,
+    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, clientAttribution,
                           /*devicePolicy*/0, &cameraInfo);
     mCamera->reconnect();
 
@@ -291,15 +310,11 @@
                 },
                 [&]() {
                     int64_t timestamp = mFDP->ConsumeIntegral<int64_t>();
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->recordingFrameHandleCallbackTimestamp(timestamp, handle);
                 },
                 [&]() {
-                    int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                    native_handle_t* handle = native_handle_create(numFds, numInts);
+                    native_handle_t* handle = createNativeHandle();
                     mCamera->releaseRecordingFrameHandle(handle);
                 },
                 [&]() { mCamera->releaseRecordingFrame(iMem); },
@@ -308,9 +323,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     mCamera->releaseRecordingFrameHandleBatch(handles);
@@ -320,9 +333,7 @@
                     for (int8_t i = 0;
                          i < mFDP->ConsumeIntegralInRange<int8_t>(kMinElements, kMaxElements);
                          ++i) {
-                        int32_t numFds = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        int32_t numInts = mFDP->ConsumeIntegralInRange<int32_t>(kNumMin, kNumMax);
-                        native_handle_t* handle = native_handle_create(numFds, numInts);
+                        native_handle_t* handle = createNativeHandle();
                         handles.push_back(handle);
                     }
                     std::vector<nsecs_t> timestamps;
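The handles produced by createNativeHandle() own real file descriptors. If the callee does not take ownership (an assumption; the ownership semantics are not visible in this hunk), matching cleanup with the standard libcutils helpers would look like this sketch:

    // Sketch: cleanup for a handle produced by createNativeHandle().
    #include <cutils/native_handle.h>

    static void destroyNativeHandle(native_handle_t* handle) {
        if (handle == nullptr) return;
        native_handle_close(handle);   // closes every fd stored in data[0..numFds)
        native_handle_delete(handle);  // frees the native_handle_t itself
    }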
diff --git a/cmds/screenrecord/FrameOutput.cpp b/cmds/screenrecord/FrameOutput.cpp
index ee7ace6..6388518 100644
--- a/cmds/screenrecord/FrameOutput.cpp
+++ b/cmds/screenrecord/FrameOutput.cpp
@@ -15,11 +15,14 @@
  */
 
 #define LOG_TAG "ScreenRecord"
-//#define LOG_NDEBUG 0
-#include <utils/Log.h>
 
+//#define LOG_NDEBUG 0
+
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <GLES2/gl2.h>
 #include <GLES2/gl2ext.h>
+#include <utils/Log.h>
 
 #include "FrameOutput.h"
 
@@ -67,11 +70,17 @@
         return UNKNOWN_ERROR;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mGlConsumer = new GLConsumer(mExtTextureName, GL_TEXTURE_EXTERNAL_OES, /*useFenceSync=*/true,
+                                 /*isControlledByApp=*/false);
+    auto producer = mGlConsumer->getSurface()->getIGraphicBufferProducer();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
     mGlConsumer = new GLConsumer(consumer, mExtTextureName,
                 GL_TEXTURE_EXTERNAL_OES, true, false);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mGlConsumer->setName(String8("virtual display"));
     mGlConsumer->setDefaultBufferSize(width, height);
     producer->setMaxDequeuedBufferCount(4);
diff --git a/cmds/screenrecord/Overlay.cpp b/cmds/screenrecord/Overlay.cpp
index a19ef8e..727f16a 100644
--- a/cmds/screenrecord/Overlay.cpp
+++ b/cmds/screenrecord/Overlay.cpp
@@ -172,10 +172,16 @@
         return UNKNOWN_ERROR;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mGlConsumer = new GLConsumer(mExtTextureName, GL_TEXTURE_EXTERNAL_OES, /*useFenceSync=*/true,
+                                 /*isControlledByApp=*/false);
+    mProducer = mGlConsumer->getSurface()->getIGraphicBufferProducer();
+#else
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&mProducer, &consumer);
     mGlConsumer = new GLConsumer(consumer, mExtTextureName,
                 GL_TEXTURE_EXTERNAL_OES, true, false);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mGlConsumer->setName(String8("virtual display"));
     mGlConsumer->setDefaultBufferSize(width, height);
     mProducer->setMaxDequeuedBufferCount(4);
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 28670b1..de925b8 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -362,13 +362,26 @@
         const ui::DisplayState& displayState,
         const sp<IGraphicBufferProducer>& bufferProducer,
         sp<IBinder>* pDisplayHandle, sp<SurfaceControl>* mirrorRoot) {
-    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
-            String8("ScreenRecorder"), gSecureDisplay);
+    std::string displayName = gPhysicalDisplayId
+      ? "ScreenRecorder " + to_string(*gPhysicalDisplayId)
+      : "ScreenRecorder";
+    static const std::string kDisplayName(displayName);
+
+    sp<IBinder> dpy = SurfaceComposerClient::createVirtualDisplay(kDisplayName, gSecureDisplay);
     SurfaceComposerClient::Transaction t;
     t.setDisplaySurface(dpy, bufferProducer);
     setDisplayProjection(t, dpy, displayState);
+
+    // Ensure that the random layer stack assigned to the virtual display changes
+    // between calls. If a list of displays with their layer stacks becomes
+    // available, it should be used here to guarantee a new layer stack.
+    std::srand(
+        std::chrono::duration_cast<std::chrono::milliseconds>(
+            std::chrono::system_clock::now().time_since_epoch()
+        ).count());
     ui::LayerStack layerStack = ui::LayerStack::fromValue(std::rand());
     t.setDisplayLayerStack(dpy, layerStack);
+
     PhysicalDisplayId displayId;
     status_t err = getPhysicalDisplayId(displayId);
     if (err != NO_ERROR) {
@@ -797,7 +810,7 @@
     sp<Overlay> overlay;
 
     ~RecordingData() {
-        if (dpy != nullptr) SurfaceComposerClient::destroyDisplay(dpy);
+        if (dpy != nullptr) SurfaceComposerClient::destroyVirtualDisplay(dpy);
         if (overlay != nullptr) overlay->stop();
         if (encoder != nullptr) {
             encoder->stop();
@@ -1224,6 +1237,8 @@
         "    see \"dumpsys SurfaceFlinger --display-id\" for valid display IDs.\n"
         "--verbose\n"
         "    Display interesting information on stdout.\n"
+        "--version\n"
+        "    Show Android screenrecord version.\n"
         "--help\n"
         "    Show this message.\n"
         "\n"
@@ -1255,6 +1270,7 @@
         { "bframes",            required_argument,  NULL, 'B' },
         { "display-id",         required_argument,  NULL, 'd' },
         { "capture-secure",     no_argument,        NULL, 'S' },
+        { "version",            no_argument,        NULL, 'x' },
         { NULL,                 0,                  NULL, 0 }
     };
 
@@ -1377,6 +1393,9 @@
         case 'S':
             gSecureDisplay = true;
             break;
+        case 'x':
+            fprintf(stderr, "%d.%d\n", kVersionMajor, kVersionMinor);
+            return 0;
         default:
             if (ic != '?') {
                 fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
diff --git a/cmds/screenrecord/screenrecord.h b/cmds/screenrecord/screenrecord.h
index cec7c13..57826b0 100644
--- a/cmds/screenrecord/screenrecord.h
+++ b/cmds/screenrecord/screenrecord.h
@@ -18,6 +18,6 @@
 #define SCREENRECORD_SCREENRECORD_H
 
 #define kVersionMajor 1
-#define kVersionMinor 3
+#define kVersionMinor 4
 
 #endif /*SCREENRECORD_SCREENRECORD_H*/
diff --git a/cmds/stagefright/stagefright.cpp b/cmds/stagefright/stagefright.cpp
index f26e3a8..1a6e5e8 100644
--- a/cmds/stagefright/stagefright.cpp
+++ b/cmds/stagefright/stagefright.cpp
@@ -60,6 +60,7 @@
 
 #include <private/media/VideoFrame.h>
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/GLConsumer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
@@ -1133,7 +1134,12 @@
             CHECK(gSurface != NULL);
         } else {
             CHECK(useSurfaceTexAlloc);
-
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+            sp<GLConsumer> texture =
+                    new GLConsumer(0 /* tex */, GLConsumer::TEXTURE_EXTERNAL,
+                                   true /* useFenceSync */, false /* isControlledByApp */);
+            gSurface = texture->getSurface();
+#else
             sp<IGraphicBufferProducer> producer;
             sp<IGraphicBufferConsumer> consumer;
             BufferQueue::createBufferQueue(&producer, &consumer);
@@ -1141,6 +1147,7 @@
                     GLConsumer::TEXTURE_EXTERNAL, true /* useFenceSync */,
                     false /* isControlledByApp */);
             gSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         }
     }
 
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
index 16ea15e..6e55a16 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
@@ -64,10 +64,6 @@
         "libfwdlock-decoder",
     ],
 
-    whole_static_libs: [
-        "libc++fs",
-    ],
-
     local_include_dirs: ["include"],
 
     relative_install_path: "drm",
diff --git a/drm/libmediadrmrkp/Android.bp b/drm/libmediadrmrkp/Android.bp
index b1a01e4..88f0571 100644
--- a/drm/libmediadrmrkp/Android.bp
+++ b/drm/libmediadrmrkp/Android.bp
@@ -14,6 +14,7 @@
     ],
     static_libs: [
         "android.hardware.common-V2-ndk",
+        "android.hardware.drm.common-V1-ndk",
         "android.hardware.drm-V1-ndk",
         "android.hardware.security.rkp-V3-ndk",
         "libbase",
@@ -39,6 +40,7 @@
     ],
     static_libs: [
         "android.hardware.common-V2-ndk",
+        "android.hardware.drm.common-V1-ndk",
         "android.hardware.drm-V1-ndk",
         "android.hardware.security.rkp-V3-ndk",
         "libbase",
diff --git a/drm/libmediadrmrkp/include/DrmRemotelyProvisionedComponent.h b/drm/libmediadrmrkp/include/DrmRemotelyProvisionedComponent.h
index f046785..97a8cc4 100644
--- a/drm/libmediadrmrkp/include/DrmRemotelyProvisionedComponent.h
+++ b/drm/libmediadrmrkp/include/DrmRemotelyProvisionedComponent.h
@@ -35,7 +35,8 @@
 class DrmRemotelyProvisionedComponent : public BnRemotelyProvisionedComponent {
   public:
     DrmRemotelyProvisionedComponent(std::shared_ptr<IDrmPlugin> drm, std::string drmVendor,
-                                    std::string drmDesc, std::vector<uint8_t> bcc);
+                                    std::string drmDesc, std::vector<uint8_t> bcc,
+                                    std::vector<uint8_t> bcc_signature);
     ScopedAStatus getHardwareInfo(RpcHardwareInfo* info) override;
 
     ScopedAStatus generateEcdsaP256KeyPair(bool testMode, MacedPublicKey* macedPublicKey,
@@ -60,6 +61,7 @@
     std::string mDrmVendor;
     std::string mDrmDesc;
     std::vector<uint8_t> mBcc;
+    std::vector<uint8_t> mBccSignature;
 };
 }  // namespace android::mediadrm
 
diff --git a/drm/libmediadrmrkp/src/DrmRemotelyProvisionedComponent.cpp b/drm/libmediadrmrkp/src/DrmRemotelyProvisionedComponent.cpp
index 440be79..65054b0 100644
--- a/drm/libmediadrmrkp/src/DrmRemotelyProvisionedComponent.cpp
+++ b/drm/libmediadrmrkp/src/DrmRemotelyProvisionedComponent.cpp
@@ -28,11 +28,13 @@
 DrmRemotelyProvisionedComponent::DrmRemotelyProvisionedComponent(std::shared_ptr<IDrmPlugin> drm,
                                                                  std::string drmVendor,
                                                                  std::string drmDesc,
-                                                                 std::vector<uint8_t> bcc)
+                                                                 std::vector<uint8_t> bcc,
+                                                                 std::vector<uint8_t> bcc_signature)
     : mDrm(std::move(drm)),
       mDrmVendor(std::move(drmVendor)),
       mDrmDesc(std::move(drmDesc)),
-      mBcc(std::move(bcc)) {}
+      mBcc(std::move(bcc)),
+      mBccSignature(std::move(bcc_signature)) {}
 
 ScopedAStatus DrmRemotelyProvisionedComponent::getHardwareInfo(RpcHardwareInfo* info) {
     info->versionNumber = 3;
@@ -107,7 +109,7 @@
     for (auto i : keyToProp) {
         auto key = i.first;
         auto prop = i.second;
-        const auto& val= deviceInfoMap.get(key);
+        const auto& val = deviceInfoMap.get(key);
         if (val == nullptr || val->asTstr()->value().empty()) {
             std::string propValue = android::base::GetProperty(prop, "");
             if (propValue.empty()) {
@@ -161,12 +163,16 @@
     }
 
     // assemble AuthenticatedRequest (definition in IRemotelyProvisionedComponent.aidl)
-    *out = cppbor::Array()
-                   .add(1 /* version */)
-                   .add(cppbor::Map() /* UdsCerts */)
-                   .add(cppbor::EncodedItem(mBcc))
-                   .add(cppbor::EncodedItem(std::move(deviceSignedCsrPayload)))
-                   .encode();
+    cppbor::Array request_array = cppbor::Array().add(1 /* version */);
+    if (!mBccSignature.empty()) {
+        request_array.add(cppbor::EncodedItem(mBccSignature) /* UdsCerts */);
+    } else {
+        request_array.add(cppbor::Map() /* empty UdsCerts */);
+    }
+    request_array.add(cppbor::EncodedItem(mBcc))
+            .add(cppbor::EncodedItem(std::move(deviceSignedCsrPayload)));
+    *out = request_array.encode();
+
     return ScopedAStatus::ok();
 }
 }  // namespace android::mediadrm
\ No newline at end of file
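For clarity, the AuthenticatedRequest assembled above is a four-element CBOR array (per the definition referenced in IRemotelyProvisionedComponent.aidl: version, UdsCerts, DiceCertChain, SignedData). The sketch below reproduces just that shape with placeholder byte vectors, using the same cppbor calls as the code above.

    // Sketch: shape of the AuthenticatedRequest built above, with placeholder
    // payloads standing in for mBcc and deviceSignedCsrPayload.
    #include <vector>
    #include <cppbor.h>

    static std::vector<uint8_t> buildAuthenticatedRequest(
            const std::vector<uint8_t>& udsCertsSignature,   // optional, may be empty
            const std::vector<uint8_t>& encodedBcc,          // pre-encoded CBOR
            const std::vector<uint8_t>& encodedCsrPayload) { // pre-encoded CBOR
        cppbor::Array request = cppbor::Array().add(1 /* version */);
        if (!udsCertsSignature.empty()) {
            request.add(cppbor::EncodedItem(udsCertsSignature));  // UdsCerts
        } else {
            request.add(cppbor::Map());  // empty UdsCerts
        }
        request.add(cppbor::EncodedItem(encodedBcc))
                .add(cppbor::EncodedItem(encodedCsrPayload));
        return request.encode();
    }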
diff --git a/drm/libmediadrmrkp/src/DrmRkpAdapter.cpp b/drm/libmediadrmrkp/src/DrmRkpAdapter.cpp
index 515d157..750b51e 100644
--- a/drm/libmediadrmrkp/src/DrmRkpAdapter.cpp
+++ b/drm/libmediadrmrkp/src/DrmRkpAdapter.cpp
@@ -87,13 +87,21 @@
                           status.getDescription().c_str());
                     return;
                 }
-
+                std::vector<uint8_t> bcc_signature;
+                status =
+                        mDrm->getPropertyByteArray("bootCertificateChainSignature", &bcc_signature);
+                if (!status.isOk()) {
+                    ALOGW("mDrm->getPropertyByteArray(\"bootCertificateChainSignature\") failed. "
+                          "Detail: [%s].",
+                          status.getDescription().c_str());
+                    // The bcc signature is optional; no need to return early when it is unavailable.
+                }
                 std::string compName(instance);
                 auto comps = static_cast<
                         std::map<std::string, std::shared_ptr<IRemotelyProvisionedComponent>>*>(
                         context);
                 (*comps)[compName] = ::ndk::SharedRefBase::make<DrmRemotelyProvisionedComponent>(
-                        mDrm, drmVendor, drmDesc, bcc);
+                        mDrm, drmVendor, drmDesc, bcc, bcc_signature);
             });
     return comps;
 }
diff --git a/drm/mediadrm/plugins/clearkey/aidl/Android.bp b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
index 9a06bd2..079e075 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/Android.bp
+++ b/drm/mediadrm/plugins/clearkey/aidl/Android.bp
@@ -11,13 +11,13 @@
     name: "aidl_clearkey_service_defaults-use-shared-deps",
 
     shared_libs: [
+        "android.hardware.drm-V1-ndk",
         "libbase",
         "libbinder_ndk",
         "libcrypto",
         "liblog",
         "libprotobuf-cpp-lite",
         "libutils",
-        "android.hardware.drm-V1-ndk",
     ],
 
     static_libs: [
@@ -40,6 +40,7 @@
 
     static_libs: [
         "android.hardware.common-V2-ndk",
+        "android.hardware.drm.common-V1-ndk",
         "android.hardware.drm-V1-ndk",
         "libbase",
         "libclearkeybase",
@@ -62,7 +63,11 @@
 
     relative_install_path: "hw",
 
-    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
 
     include_dirs: ["frameworks/av/include"],
 
@@ -81,7 +86,7 @@
     ],
     srcs: ["Service.cpp"],
     init_rc: ["android.hardware.drm-service.clearkey.rc"],
-    vintf_fragments: ["android.hardware.drm-service.clearkey.xml"],
+    vintf_fragment_modules: ["android.hardware.drm-service.clearkey.xml_vintf"],
 }
 
 cc_binary {
@@ -93,7 +98,13 @@
     overrides: ["android.hardware.drm-service.clearkey"],
     srcs: ["ServiceLazy.cpp"],
     init_rc: ["android.hardware.drm-service-lazy.clearkey.rc"],
-    vintf_fragments: ["android.hardware.drm-service.clearkey.xml"],
+    vintf_fragment_modules: ["android.hardware.drm-service.clearkey.xml_vintf"],
+}
+
+vintf_fragment {
+    name: "android.hardware.drm-service.clearkey.xml_vintf",
+    src: "android.hardware.drm-service.clearkey.xml",
+    vendor: true,
 }
 
 cc_binary {
@@ -137,18 +148,22 @@
 
     relative_install_path: "hw",
 
-    cflags: ["-Wall", "-Werror", "-Wthread-safety"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
 
     include_dirs: ["frameworks/av/include"],
 
     shared_libs: [
+        "android.hardware.drm-V1-ndk",
         "libbase",
         "libbinder_ndk",
         "libcrypto",
         "liblog",
         "libprotobuf-cpp-lite",
         "libutils",
-        "android.hardware.drm-V1-ndk",
     ],
 
     static_libs: [
@@ -192,7 +207,7 @@
     ],
     prebuilts: [
         "android.hardware.drm-service.clearkey.apex.rc",
-        "android.hardware.drm-service.clearkey.xml"
+        "android.hardware.drm-service.clearkey.xml",
     ],
     overrides: [
         "android.hardware.drm-service.clearkey",
@@ -233,11 +248,11 @@
     ],
     prebuilts: [
         "android.hardware.drm-service-lazy.clearkey.apex.rc",
-        "android.hardware.drm-service.clearkey.xml"
+        "android.hardware.drm-service.clearkey.xml",
     ],
     overrides: [
-        "android.hardware.drm-service.clearkey",
         "android.hardware.drm-service-lazy.clearkey",
+        "android.hardware.drm-service.clearkey",
         "com.android.hardware.drm.clearkey",
     ],
 }
diff --git a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
index 31cb7c0..8a93132 100644
--- a/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
+++ b/drm/mediadrm/plugins/clearkey/aidl/DrmPlugin.cpp
@@ -37,7 +37,6 @@
 const int kSecureStopIdStart = 100;
 const std::string kOfflineLicense("\"type\":\"persistent-license\"");
 const std::string kStreaming("Streaming");
-const std::string kTemporaryLicense("\"type\":\"temporary\"");
 const std::string kTrue("True");
 
 const std::string kQueryKeyLicenseType("LicenseType");
diff --git a/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
index ddbc594..cd129ac 100644
--- a/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
+++ b/drm/mediadrm/plugins/clearkey/common/JsonWebKey.cpp
@@ -27,10 +27,7 @@
 const std::string kKeyTypeTag("kty");
 const std::string kKeyTag("k");
 const std::string kKeyIdTag("kid");
-const std::string kMediaSessionType("type");
-const std::string kPersistentLicenseSession("persistent-license");
 const std::string kSymmetricKeyValue("oct");
-const std::string kTemporaryLicenseSession("temporary");
 }  // namespace
 
 namespace clearkeydrm {
diff --git a/include/media/Interpolator.h b/include/media/Interpolator.h
index e26290f..5a2ab27 100644
--- a/include/media/Interpolator.h
+++ b/include/media/Interpolator.h
@@ -289,7 +289,7 @@
 
     std::string toString() const {
         std::stringstream ss;
-        ss << "Interpolator{mInterpolatorType=" << static_cast<int32_t>(mInterpolatorType);
+        ss << "Interpolator{mInterpolatorType=" << media::toString(mInterpolatorType);
         ss << ", mFirstSlope=" << mFirstSlope;
         ss << ", mLastSlope=" << mLastSlope;
         ss << ", {";
diff --git a/include/media/VolumeShaper.h b/include/media/VolumeShaper.h
index 6208db3..26da363 100644
--- a/include/media/VolumeShaper.h
+++ b/include/media/VolumeShaper.h
@@ -116,6 +116,16 @@
             TYPE_SCALE,
         };
 
+        static std::string toString(Type type) {
+            switch (type) {
+                case TYPE_ID: return "TYPE_ID";
+                case TYPE_SCALE: return "TYPE_SCALE";
+                default:
+                    return std::string("Unknown Type: ")
+                            .append(std::to_string(static_cast<int>(type)));
+            }
+        }
+
         // Must match with VolumeShaper.java in frameworks/base.
         enum OptionFlag : int32_t {
             OPTION_FLAG_NONE           = 0,
@@ -125,6 +135,22 @@
             OPTION_FLAG_ALL            = (OPTION_FLAG_VOLUME_IN_DBFS | OPTION_FLAG_CLOCK_TIME),
         };
 
+        static std::string toString(OptionFlag flag) {
+            std::string s;
+            for (const auto& flagPair : std::initializer_list<std::pair<OptionFlag, const char*>>{
+                    {OPTION_FLAG_VOLUME_IN_DBFS, "OPTION_FLAG_VOLUME_IN_DBFS"},
+                    {OPTION_FLAG_CLOCK_TIME, "OPTION_FLAG_CLOCK_TIME"},
+                }) {
+                if (flag & flagPair.first) {
+                    if (!s.empty()) {
+                        s.append("|");
+                    }
+                    s.append(flagPair.second);
+                }
+            }
+            return s;
+        }
+
         // Bring from base class; must match with VolumeShaper.java in frameworks/base.
         using InterpolatorType = Interpolator<S, T>::InterpolatorType;
 
@@ -329,10 +355,10 @@
         // Returns a string for debug printing.
         std::string toString() const {
             std::stringstream ss;
-            ss << "VolumeShaper::Configuration{mType=" << static_cast<int32_t>(mType);
+            ss << "VolumeShaper::Configuration{mType=" << toString(mType);
             ss << ", mId=" << mId;
             if (mType != TYPE_ID) {
-                ss << ", mOptionFlags=" << static_cast<int32_t>(mOptionFlags);
+                ss << ", mOptionFlags=" << toString(mOptionFlags);
                 ss << ", mDurationMs=" << mDurationMs;
                 ss << ", " << Interpolator<S, T>::toString().c_str();
             }
@@ -414,6 +440,25 @@
                             | FLAG_CREATE_IF_NECESSARY),
         };
 
+        static std::string toString(Flag flag) {
+            std::string s;
+            for (const auto& flagPair : std::initializer_list<std::pair<Flag, const char*>>{
+                    {FLAG_REVERSE, "FLAG_REVERSE"},
+                    {FLAG_TERMINATE, "FLAG_TERMINATE"},
+                    {FLAG_JOIN, "FLAG_JOIN"},
+                    {FLAG_DELAY, "FLAG_DELAY"},
+                    {FLAG_CREATE_IF_NECESSARY, "FLAG_CREATE_IF_NECESSARY"},
+                }) {
+                if (flag & flagPair.first) {
+                    if (!s.empty()) {
+                        s.append("|");
+                    }
+                    s.append(flagPair.second);
+                }
+            }
+            return s;
+        }
+
         Operation()
             : Operation(FLAG_NONE, -1 /* replaceId */) {
         }
@@ -508,7 +553,7 @@
 
         std::string toString() const {
             std::stringstream ss;
-            ss << "VolumeShaper::Operation{mFlags=" << static_cast<int32_t>(mFlags) ;
+            ss << "VolumeShaper::Operation{mFlags=" << toString(mFlags);
             ss << ", mReplaceId=" << mReplaceId;
             ss << ", mXOffset=" << mXOffset;
             ss << "}";
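A tiny usage sketch of the new debug helpers (illustrative only; the exact output depends on the Operation defaults):

    // Sketch: the toString() helpers added above turn flag bitmasks into
    // readable names instead of raw integers.
    #include <string>
    #include <media/VolumeShaper.h>

    static std::string describeJoinOperation() {
        using android::media::VolumeShaper;
        VolumeShaper::Operation op(VolumeShaper::Operation::FLAG_JOIN, /*replaceId=*/-1);
        // Reads along the lines of "VolumeShaper::Operation{mFlags=FLAG_JOIN, mReplaceId=-1, ...}".
        return op.toString();
    }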
diff --git a/media/OWNERS b/media/OWNERS
index 976fb9e..b926075 100644
--- a/media/OWNERS
+++ b/media/OWNERS
@@ -14,5 +14,8 @@
 taklee@google.com
 wonsik@google.com
 
+# For TEST_MAPPING tv-presubmit and tv-postsubmit configurations:
+per-file TEST_MAPPING = blindahl@google.com
+
 # go/android-fwk-media-solutions for info on areas of ownership.
 include platform/frameworks/av:/media/janitors/media_solutions_OWNERS
diff --git a/media/TEST_MAPPING b/media/TEST_MAPPING
index cd5d354..695cad6 100644
--- a/media/TEST_MAPPING
+++ b/media/TEST_MAPPING
@@ -44,5 +44,42 @@
             ],
             "file_patterns": ["(?i)drm|crypto"]
         }
+    ],
+    "postsubmit": [
+        {
+            "name": "MctsMediaCodecTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.codec.cts.EncodeDecodeTest"
+                }
+            ]
+        },
+        {
+            "name": "MctsMediaCodecTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.codec.cts.DecodeEditEncodeTest"
+                }
+            ]
+        },
+        {
+            "name": "MctsMediaCodecTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.codec.cts.ExtractDecodeEditEncodeMuxTest"
+                }
+            ]
+        }
+    ],
+    // Postsubmit tests for TV devices
+    "tv-postsubmit": [
+        {
+            "name": "CtsMediaDecoderTestCases",
+            "options": [
+                {
+                    "include-filter": "android.media.decoder.cts.DecoderRenderTest"
+                }
+            ]
+        }
     ]
 }
diff --git a/media/aconfig/codec_fwk.aconfig b/media/aconfig/codec_fwk.aconfig
index b3c02eb..ed1522b 100644
--- a/media/aconfig/codec_fwk.aconfig
+++ b/media/aconfig/codec_fwk.aconfig
@@ -13,6 +13,26 @@
 }
 
 flag {
+  name: "codec_buffer_state_cleanup"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for more buffer state cleanup in MediaCodec"
+  bug: "343502509"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "dataspace_v0_partial"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for using V0 dataspace in some cases"
+  bug: "313827126"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
   name: "dynamic_color_aspects"
   is_exported: true
   namespace: "codec_fwk"
@@ -44,6 +64,16 @@
 }
 
 flag {
+  name: "input_surface_throttle"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for input surface throttle"
+  bug: "342269852"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
   name: "large_audio_frame_finish"
   namespace: "codec_fwk"
   description: "Implementation flag for large audio frame finishing tasks"
@@ -88,6 +118,16 @@
 }
 
 flag {
+  name: "secure_codecs_require_crypto"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for requiring setting crypto for secure codecs"
+  bug: "365162324"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
   name: "set_callback_stall"
   namespace: "codec_fwk"
   description: "Bugfix flag for setCallback stall"
@@ -101,7 +141,17 @@
   name: "set_state_early"
   namespace: "codec_fwk"
   description: "Bugfix flag for setting state early to avoid a race condition"
-  bug: "298613711"
+  bug: "298613712"
+  metadata {
+    purpose: PURPOSE_BUGFIX
+  }
+}
+
+flag {
+  name: "stop_hal_before_surface"
+  namespace: "codec_fwk"
+  description: "Bugfix flag for stopping the codec HAL before releasing the output surface"
+  bug: "339247977"
   metadata {
     purpose: PURPOSE_BUGFIX
   }
@@ -113,3 +163,10 @@
   description: "Feature flag to track teamfood population"
   bug: "328770262"
 }
+
+flag {
+  name: "thumbnail_block_model"
+  namespace: "codec_fwk"
+  description: "Feature flag for using block model decoder in thumbnail generation"
+  bug: "329521645"
+}
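For context, native code reads these flags through the aconfig-generated C++ accessors. The header and namespace below assume the package for this file is com.android.media.codec.flags, which is declared outside this hunk; both names would need to match the actual aconfig package.

    // Sketch only: reading one of the flags above from C++. The generated header
    // name and namespace are derived from the aconfig package, assumed here to be
    // "com.android.media.codec.flags".
    #include <com_android_media_codec_flags.h>

    bool shouldThrottleInputSurface() {
        // aconfig generates one free function per flag, named after the flag.
        return com::android::media::codec::flags::input_surface_throttle();
    }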
diff --git a/media/audio/aconfig/Android.bp b/media/audio/aconfig/Android.bp
index ec45e2f..a5aeff2 100644
--- a/media/audio/aconfig/Android.bp
+++ b/media/audio/aconfig/Android.bp
@@ -50,6 +50,23 @@
 }
 
 cc_aconfig_library {
+    name: "com.android.media.audioserver-aconfig-cc-ro",
+    aconfig_declarations: "com.android.media.audioserver-aconfig",
+    defaults: ["audio-aconfig-cc-defaults"],
+    double_loadable: true,
+    host_supported: true,
+    product_available: true,
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+    mode: "force-read-only",
+}
+
+cc_aconfig_library {
     name: "com.android.media.audio-aconfig-cc",
     aconfig_declarations: "com.android.media.audio-aconfig",
     defaults: ["audio-aconfig-cc-defaults"],
@@ -118,6 +135,13 @@
     visibility: ["//frameworks/base/api"],
 }
 
+aconfig_declarations {
+    name: "android.media.soundtrigger-aconfig",
+    package: "android.media.soundtrigger",
+    container: "system",
+    srcs: ["soundtrigger.aconfig"],
+}
+
 java_aconfig_library {
     name: "android.media.audio-aconfig-java",
     aconfig_declarations: "android.media.audio-aconfig",
@@ -141,6 +165,12 @@
     defaults: ["framework-minus-apex-aconfig-java-defaults"],
 }
 
+java_aconfig_library {
+    name: "android.media.soundtrigger-aconfig-java",
+    aconfig_declarations: "android.media.soundtrigger-aconfig",
+    defaults: ["framework-minus-apex-aconfig-java-defaults"],
+}
+
 cc_aconfig_library {
     name: "android.media.audiopolicy-aconfig-cc",
     aconfig_declarations: "android.media.audiopolicy-aconfig",
@@ -153,5 +183,6 @@
         "android.media.audio-aconfig-java",
         "android.media.audiopolicy-aconfig-java",
         "android.media.midi-aconfig-java",
+        "android.media.soundtrigger-aconfig-java",
     ],
 }
diff --git a/media/audio/aconfig/OWNERS b/media/audio/aconfig/OWNERS
new file mode 100644
index 0000000..fb1e866
--- /dev/null
+++ b/media/audio/aconfig/OWNERS
@@ -0,0 +1,4 @@
+# Bug component: 48436
+atneya@google.com
+elaurent@google.com
+include platform/frameworks/av:/media/janitors/audio_OWNERS #{LAST_RESORT_SUGGESTION}
diff --git a/media/audio/aconfig/aaudio.aconfig b/media/audio/aconfig/aaudio.aconfig
index c160109..f9fb4c7 100644
--- a/media/audio/aconfig/aaudio.aconfig
+++ b/media/audio/aconfig/aaudio.aconfig
@@ -11,3 +11,10 @@
     description: "Enable the AAudio sample rate converter."
     bug: "219533889"
 }
+
+flag {
+    name: "start_stop_client_from_command_thread"
+    namespace: "media_audio"
+    description: "Start or stop client from command thread."
+    bug: "341627085"
+}
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 17ce8df..c732708 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -6,6 +6,14 @@
 container: "system"
 
 flag {
+    name: "abs_volume_index_fix"
+    namespace: "media_audio"
+    description:
+        "Fix double attenuation and index jumps in absolute volume mode"
+    bug: "340693050"
+}
+
+flag {
     name: "alarm_min_volume_zero"
     namespace: "media_audio"
     description: "Support configuring alarm min vol to zero"
@@ -20,6 +28,13 @@
 }
 
 flag {
+    name: "audioserver_permissions"
+    namespace: "media_audio"
+    description: "Refactoring permission management in audioserver"
+    bug: "338089555"
+}
+
+flag {
     name: "bluetooth_mac_address_anonymization"
     namespace: "media_audio"
     description:
@@ -45,6 +60,46 @@
 }
 
 flag {
+    name: "equal_sco_lea_vc_index_range"
+    namespace: "media_audio"
+    description:
+        "Introduce the same index range for voice calls over SCO and "
+        "LE audio"
+    bug: "364364777"
+}
+
+flag {
+    name: "music_fx_edge_to_edge"
+    namespace: "media_audio"
+    description: "Enable Edge-to-edge feature for MusicFx and handle insets"
+    bug: "336204940"
+}
+
+flag {
+    name: "port_to_piid_simplification"
+    namespace: "media_audio"
+    description: "PAM only needs for each piid the last portId mapping"
+    bug: "335747248"
+
+}
+
+flag {
+    name: "replace_stream_bt_sco"
+    namespace: "media_audio"
+    description:
+        "Replace internally STREAM_BLUETOOTH_SCO with STREAM_VOICE_CALL"
+    bug: "345024266"
+}
+
+flag {
+    name: "ring_my_car"
+    namespace: "media_audio"
+    description:
+        "Incoming ringtones will not be muted based on ringer mode when connected to a car"
+    bug: "319515324"
+}
+
+flag {
     name: "ringer_mode_affects_alarm"
     namespace: "media_audio"
     description:
diff --git a/media/audio/aconfig/audio_framework.aconfig b/media/audio/aconfig/audio_framework.aconfig
index 0209e28..b8555df 100644
--- a/media/audio/aconfig/audio_framework.aconfig
+++ b/media/audio/aconfig/audio_framework.aconfig
@@ -22,6 +22,13 @@
     bug: "302323921"
 }
 
+flag {
+    name: "enable_ringtone_haptics_customization"
+    namespace: "media_audio"
+    description: "Enables haptic customization for playing ringtone."
+    bug: "351974934"
+}
+
 flag {
     name: "feature_spatial_audio_headtracking_low_latency"
     is_exported: true
@@ -86,6 +93,13 @@
 }
 
 flag {
+    name: "muted_by_port_volume_api"
+    namespace: "media_audio"
+    description: "Playback monitoring flag used when player muted by port volume"
+    bug: "319515324"
+}
+
+flag {
     name: "sco_managed_by_audio"
     is_exported: true
     namespace: "media_audio"
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 5c6504f..1ce4d00 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -15,6 +15,20 @@
 }
 
 flag {
+    name: "effect_chain_callback_improve"
+    namespace: "media_audio"
+    description: "Improve effect chain callback mutex logic."
+    bug: "342413767"
+}
+
+flag {
+    name: "enable_audio_input_device_routing"
+    namespace: "media_audio"
+    description: "Allow audio input devices routing control."
+    bug: "364923030"
+}
+
+flag {
     name: "fdtostring_timeout_fix"
     namespace: "media_audio"
     description: "Improve fdtostring implementation to properly handle timing out."
@@ -22,6 +36,32 @@
 }
 
 flag {
+    name: "fix_call_audio_patch"
+    namespace: "media_audio"
+    description:
+        "optimize creation and release of audio patches for call routing"
+    bug: "292492229"
+}
+
+flag {
+    name: "fix_concurrent_playback_behavior_with_bit_perfect_client"
+    namespace: "media_audio"
+    description:
+        "Treat playback use cases differently when bit-perfect client is active to improve the "
+        "user experience with bit-perfect playback."
+    bug: "339515899"
+}
+
+flag {
+    name: "fix_input_sharing_logic"
+    namespace: "media_audio"
+    description:
+        "Fix the audio policy logic that decides to reuse or close "
+        "input streams when resources are exhausted"
+    bug: "338446410"
+}
+
+flag {
     name: "mutex_priority_inheritance"
     namespace: "media_audio"
     description:
@@ -30,3 +70,27 @@
         "This feature helps reduce audio glitching caused by low priority blocking threads."
     bug: "209491695"
 }
+
+flag {
+    name: "portid_volume_management"
+    namespace: "media_audio"
+    description:
+        "Allows to manage volume by port id within audio flinger instead of legacy stream type."
+    bug: "317212590"
+}
+
+flag {
+    name: "power_stats"
+    namespace: "media_audio"
+    description:
+        "Add power stats tracking and management."
+    bug: "350114693"
+}
+
+flag {
+    name: "use_bt_sco_for_media"
+    namespace: "media_audio"
+    description:
+        "Play media strategy over Bluetooth SCO when active"
+    bug: "292037886"
+}
diff --git a/media/audio/aconfig/soundtrigger.aconfig b/media/audio/aconfig/soundtrigger.aconfig
new file mode 100644
index 0000000..5233119
--- /dev/null
+++ b/media/audio/aconfig/soundtrigger.aconfig
@@ -0,0 +1,23 @@
+# Flags for sound trigger
+#
+# Please add flags in alphabetical order.
+
+package: "android.media.soundtrigger"
+container: "system"
+
+flag {
+    name: "generic_model_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Feature flag for adding GenericSoundModel to SystemApi"
+    bug: "339267254"
+}
+
+flag {
+    name: "manager_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Feature flag for adding SoundTriggerManager API to SystemApi"
+    bug: "339267254"
+}
+
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 77418eb..01b6e42 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -76,6 +76,8 @@
 using media::audio::common::AudioOffloadInfo;
 using media::audio::common::AudioOutputFlags;
 using media::audio::common::AudioPlaybackRate;
+using media::audio::common::AudioPolicyForcedConfig;
+using media::audio::common::AudioPolicyForceUse;
 using media::audio::common::AudioPort;
 using media::audio::common::AudioPortConfig;
 using media::audio::common::AudioPortDeviceExt;
@@ -1069,13 +1071,6 @@
             if (mac.size() != 6) return BAD_VALUE;
             snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%02X:%02X:%02X:%02X:%02X:%02X",
                     mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
-            // special case for anonymized mac address:
-            // change anonymized bytes back from FD:FF:FF:FF to XX:XX:XX:XX
-            std::string address(addressBuffer);
-            if (address.compare(0, strlen("FD:FF:FF:FF"), "FD:FF:FF:FF") == 0) {
-                address.replace(0, strlen("FD:FF:FF:FF"), "XX:XX:XX:XX");
-            }
-            strcpy(addressBuffer, address.c_str());
         } break;
         case Tag::ipv4: {
             const std::vector<uint8_t>& ipv4 = aidl.address.get<AudioDeviceAddress::ipv4>();
@@ -1136,20 +1131,11 @@
     if (!legacyAddress.empty()) {
         switch (suggestDeviceAddressTag(aidl.type)) {
             case Tag::mac: {
-                // special case for anonymized mac address:
-                // change anonymized bytes so that they can be scanned as HEX bytes
-                // Use '01' for LSB bits 0 and 1 as Bluetooth MAC addresses are never multicast
-                // and universaly administered
-                std::string address = legacyAddress;
-                if (address.compare(0, strlen("XX:XX:XX:XX"), "XX:XX:XX:XX") == 0) {
-                    address.replace(0, strlen("XX:XX:XX:XX"), "FD:FF:FF:FF");
-                }
-
                 std::vector<uint8_t> mac(6);
-                int status = sscanf(address.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
+                int status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
                         &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
                 if (status != mac.size()) {
-                    ALOGE("%s: malformed MAC address: \"%s\"", __func__, address.c_str());
+                    ALOGE("%s: malformed MAC address: \"%s\"", __func__, legacyAddress.c_str());
                     return unexpected(BAD_VALUE);
                 }
                 aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::mac>(std::move(mac));
@@ -3317,6 +3303,138 @@
     return OK;
 }
 
+ConversionResult<audio_policy_force_use_t>
+aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(AudioPolicyForceUse aidl) {
+    switch (aidl) {
+        case AudioPolicyForceUse::COMMUNICATION:
+            return AUDIO_POLICY_FORCE_FOR_COMMUNICATION;
+        case AudioPolicyForceUse::MEDIA:
+            return AUDIO_POLICY_FORCE_FOR_MEDIA;
+        case AudioPolicyForceUse::RECORD:
+            return AUDIO_POLICY_FORCE_FOR_RECORD;
+        case AudioPolicyForceUse::DOCK:
+            return AUDIO_POLICY_FORCE_FOR_DOCK;
+        case AudioPolicyForceUse::SYSTEM:
+            return AUDIO_POLICY_FORCE_FOR_SYSTEM;
+        case AudioPolicyForceUse::HDMI_SYSTEM_AUDIO:
+            return AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO;
+        case AudioPolicyForceUse::ENCODED_SURROUND:
+            return AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND;
+        case AudioPolicyForceUse::VIBRATE_RINGING:
+            return AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioPolicyForceUse>
+legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy) {
+    switch (legacy) {
+        case AUDIO_POLICY_FORCE_FOR_COMMUNICATION:
+            return AudioPolicyForceUse::COMMUNICATION;
+        case AUDIO_POLICY_FORCE_FOR_MEDIA:
+            return AudioPolicyForceUse::MEDIA;
+        case AUDIO_POLICY_FORCE_FOR_RECORD:
+            return AudioPolicyForceUse::RECORD;
+        case AUDIO_POLICY_FORCE_FOR_DOCK:
+            return AudioPolicyForceUse::DOCK;
+        case AUDIO_POLICY_FORCE_FOR_SYSTEM:
+            return AudioPolicyForceUse::SYSTEM;
+        case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO:
+            return AudioPolicyForceUse::HDMI_SYSTEM_AUDIO;
+        case AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND:
+            return AudioPolicyForceUse::ENCODED_SURROUND;
+        case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
+            return AudioPolicyForceUse::VIBRATE_RINGING;
+        case AUDIO_POLICY_FORCE_USE_CNT:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<audio_policy_forced_cfg_t>
+aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(AudioPolicyForcedConfig aidl) {
+    switch (aidl) {
+        case AudioPolicyForcedConfig::NONE:
+            return AUDIO_POLICY_FORCE_NONE;
+        case AudioPolicyForcedConfig::SPEAKER:
+            return AUDIO_POLICY_FORCE_SPEAKER;
+        case AudioPolicyForcedConfig::HEADPHONES:
+            return AUDIO_POLICY_FORCE_HEADPHONES;
+        case AudioPolicyForcedConfig::BT_SCO:
+            return AUDIO_POLICY_FORCE_BT_SCO;
+        case AudioPolicyForcedConfig::BT_A2DP:
+            return AUDIO_POLICY_FORCE_BT_A2DP;
+        case AudioPolicyForcedConfig::WIRED_ACCESSORY:
+            return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
+        case AudioPolicyForcedConfig::BT_CAR_DOCK:
+            return AUDIO_POLICY_FORCE_BT_CAR_DOCK;
+        case AudioPolicyForcedConfig::BT_DESK_DOCK:
+            return AUDIO_POLICY_FORCE_BT_DESK_DOCK;
+        case AudioPolicyForcedConfig::ANALOG_DOCK:
+            return AUDIO_POLICY_FORCE_ANALOG_DOCK;
+        case AudioPolicyForcedConfig::DIGITAL_DOCK:
+            return AUDIO_POLICY_FORCE_DIGITAL_DOCK;
+        case AudioPolicyForcedConfig::NO_BT_A2DP:
+            return AUDIO_POLICY_FORCE_NO_BT_A2DP;
+        case AudioPolicyForcedConfig::SYSTEM_ENFORCED:
+            return AUDIO_POLICY_FORCE_SYSTEM_ENFORCED;
+        case AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED:
+            return AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED;
+        case AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER:
+            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER;
+        case AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS:
+            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS;
+        case AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL:
+            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL;
+        case AudioPolicyForcedConfig::BT_BLE:
+            return AUDIO_POLICY_FORCE_BT_BLE;
+    }
+    return unexpected(BAD_VALUE);
+}
+
+ConversionResult<AudioPolicyForcedConfig>
+legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy) {
+    switch (legacy) {
+        case AUDIO_POLICY_FORCE_NONE:
+            return AudioPolicyForcedConfig::NONE;
+        case AUDIO_POLICY_FORCE_SPEAKER:
+            return AudioPolicyForcedConfig::SPEAKER;
+        case AUDIO_POLICY_FORCE_HEADPHONES:
+            return AudioPolicyForcedConfig::HEADPHONES;
+        case AUDIO_POLICY_FORCE_BT_SCO:
+            return AudioPolicyForcedConfig::BT_SCO;
+        case AUDIO_POLICY_FORCE_BT_A2DP:
+            return AudioPolicyForcedConfig::BT_A2DP;
+        case AUDIO_POLICY_FORCE_WIRED_ACCESSORY:
+            return AudioPolicyForcedConfig::WIRED_ACCESSORY;
+        case AUDIO_POLICY_FORCE_BT_CAR_DOCK:
+            return AudioPolicyForcedConfig::BT_CAR_DOCK;
+        case AUDIO_POLICY_FORCE_BT_DESK_DOCK:
+            return AudioPolicyForcedConfig::BT_DESK_DOCK;
+        case AUDIO_POLICY_FORCE_ANALOG_DOCK:
+            return AudioPolicyForcedConfig::ANALOG_DOCK;
+        case AUDIO_POLICY_FORCE_DIGITAL_DOCK:
+            return AudioPolicyForcedConfig::DIGITAL_DOCK;
+        case AUDIO_POLICY_FORCE_NO_BT_A2DP:
+            return AudioPolicyForcedConfig::NO_BT_A2DP;
+        case AUDIO_POLICY_FORCE_SYSTEM_ENFORCED:
+            return AudioPolicyForcedConfig::SYSTEM_ENFORCED;
+        case AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED:
+            return AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED;
+        case AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER:
+            return AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER;
+        case AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS:
+            return AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS;
+        case AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL:
+            return AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL;
+        case AUDIO_POLICY_FORCE_BT_BLE:
+            return AudioPolicyForcedConfig::BT_BLE;
+        case AUDIO_POLICY_FORCE_CFG_CNT:
+            break;
+    }
+    return unexpected(BAD_VALUE);
+}
+
 }  // namespace android
 
 #undef GET_DEVICE_DESC_CONNECTION
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 9b14a5e..5f7260d 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -35,6 +35,7 @@
 
 using hardware::audio::common::PlaybackTrackMetadata;
 using hardware::audio::common::RecordTrackMetadata;
+using hardware::audio::common::SourceMetadata;
 using ::android::BAD_VALUE;
 using ::android::OK;
 
@@ -194,5 +195,17 @@
     return aidl;
 }
 
+// static
+ConversionResult<SourceMetadata>
+legacy2aidl_playback_track_metadata_v7_SourceMetadata(
+        const std::vector<playback_track_metadata_v7_t>& legacy) {
+    SourceMetadata aidl;
+    aidl.tracks = VALUE_OR_RETURN(
+            convertContainer<std::vector<PlaybackTrackMetadata>>(
+                    legacy,
+                    legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
+    return aidl;
+}
+
 }  // namespace android
 }  // aidl
diff --git a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
index 7268464..9dfb7e7 100644
--- a/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
+++ b/media/audioaidlconversion/include/media/AidlConversionCppNdk-impl.h
@@ -58,6 +58,8 @@
 #include PREFIX(android/media/audio/common/AudioMode.h)
 #include PREFIX(android/media/audio/common/AudioOffloadInfo.h)
 #include PREFIX(android/media/audio/common/AudioOutputFlags.h)
+#include PREFIX(android/media/audio/common/AudioPolicyForceUse.h)
+#include PREFIX(android/media/audio/common/AudioPolicyForcedConfig.h)
 #include PREFIX(android/media/audio/common/AudioPort.h)
 #include PREFIX(android/media/audio/common/AudioPortConfig.h)
 #include PREFIX(android/media/audio/common/AudioPortExt.h)
@@ -76,6 +78,7 @@
 
 #include <system/audio.h>
 #include <system/audio_effect.h>
+#include <system/audio_policy.h>
 
 #if defined(BACKEND_NDK_IMPL)
 namespace aidl {
@@ -454,6 +457,18 @@
         media::audio::common::MicrophoneInfo* aidlInfo,
         media::audio::common::MicrophoneDynamicInfo* aidlDynamic);
 
+ConversionResult<audio_policy_forced_cfg_t>
+aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(
+        media::audio::common::AudioPolicyForcedConfig aidl);
+ConversionResult<media::audio::common::AudioPolicyForcedConfig>
+legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy);
+
+ConversionResult<audio_policy_force_use_t>
+aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(
+        media::audio::common::AudioPolicyForceUse aidl);
+ConversionResult<media::audio::common::AudioPolicyForceUse>
+legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy);
+
 }  // namespace android
 
 #if defined(BACKEND_NDK_IMPL)
diff --git a/media/audioaidlconversion/include/media/AidlConversionEffect.h b/media/audioaidlconversion/include/media/AidlConversionEffect.h
index b03d06b..e51bf8b 100644
--- a/media/audioaidlconversion/include/media/AidlConversionEffect.h
+++ b/media/audioaidlconversion/include/media/AidlConversionEffect.h
@@ -72,9 +72,6 @@
                 MAKE_EXTENSION_PARAMETER_ID(_effect, _tag##Tag, _extId);                          \
         aidl::android::hardware::audio::effect::Parameter _aidlParam;                             \
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(_id, &_aidlParam))); \
-        aidl::android::hardware::audio::effect::VendorExtension _ext =                            \
-                VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(                              \
-                        _aidlParam, _effect, _tag, _effect::vendor, VendorExtension));            \
         return VALUE_OR_RETURN_STATUS(                                                            \
                 aidl::android::aidl2legacy_Parameter_EffectParameterWriter(_aidlParam, _param));  \
     }
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index 813a728..b8a3110 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -28,6 +28,7 @@
 
 #include <aidl/android/hardware/audio/common/PlaybackTrackMetadata.h>
 #include <aidl/android/hardware/audio/common/RecordTrackMetadata.h>
+#include <aidl/android/hardware/audio/common/SourceMetadata.h>
 #include <aidl/android/media/audio/common/AudioConfig.h>
 #include <media/AidlConversionUtil.h>
 
@@ -56,5 +57,9 @@
 ConversionResult<hardware::audio::common::RecordTrackMetadata>
 legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy);
 
+ConversionResult<hardware::audio::common::SourceMetadata>
+legacy2aidl_playback_track_metadata_v7_SourceMetadata(
+        const std::vector<playback_track_metadata_v7_t>& legacy);
+
 }  // namespace android
 }  // namespace aidl
diff --git a/media/audioserver/Android.bp b/media/audioserver/Android.bp
index e74fb91..47b48e3 100644
--- a/media/audioserver/Android.bp
+++ b/media/audioserver/Android.bp
@@ -20,12 +20,6 @@
         "-Werror",
     ],
 
-    header_libs: [
-        "libaudiohal_headers",
-        "libmedia_headers",
-        "libmediametrics_headers",
-    ],
-
     defaults: [
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_media_audio_common_types_cpp_shared",
@@ -39,39 +33,10 @@
         "libaudioflinger",
         "libaudiopolicyservice",
         "libmedialogservice",
-        "libnbaio",
     ],
 
     shared_libs: [
-        "libaudioclient",
-        "libaudioprocessing",
-        "libbinder",
-        "libcutils",
-        "libhidlbase",
-        "liblog",
-        "libmedia",
-        "libmediautils",
-        "libnblog",
-        "libpowermanager",
-        "libutils",
-        "libvibrator",
-    ],
-
-    // TODO check if we still need all of these include directories
-    include_dirs: [
-        "external/sonic",
-        "frameworks/av/media/libaaudio/include",
-        "frameworks/av/media/libaaudio/src",
-        "frameworks/av/media/libaaudio/src/binding",
-        "frameworks/av/services/audioflinger",
-        "frameworks/av/services/audiopolicy",
-        "frameworks/av/services/audiopolicy/common/include",
-        "frameworks/av/services/audiopolicy/common/managerdefinitions/include",
-        "frameworks/av/services/audiopolicy/engine/interface",
-        "frameworks/av/services/audiopolicy/service",
-        "frameworks/av/services/medialog",
-        "frameworks/av/services/oboeservice", // TODO oboeservice is the old folder name for aaudioservice. It will be changed.
-
+        "libhidlbase", // required for threadpool config.
     ],
 
     init_rc: ["audioserver.rc"],
diff --git a/media/audioserver/main_audioserver.cpp b/media/audioserver/main_audioserver.cpp
index 55847f4..5d7daa4 100644
--- a/media/audioserver/main_audioserver.cpp
+++ b/media/audioserver/main_audioserver.cpp
@@ -168,6 +168,7 @@
         ALOGW_IF(AudioSystem::setLocalAudioFlinger(af) != OK,
                 "%s: AudioSystem already has an AudioFlinger instance!", __func__);
         const auto aps = sp<AudioPolicyService>::make();
+        af->initAudioPolicyLocal(aps);
         ALOGD("%s: AudioPolicy created", __func__);
         ALOGW_IF(AudioSystem::setLocalAudioPolicyService(aps) != OK,
                  "%s: AudioSystem already has an AudioPolicyService instance!", __func__);
diff --git a/media/codec2/components/avc/Android.bp b/media/codec2/components/avc/Android.bp
index a7ae85b..8ccb9ac 100644
--- a/media/codec2/components/avc/Android.bp
+++ b/media/codec2/components/avc/Android.bp
@@ -17,6 +17,10 @@
 
     static_libs: ["libavcdec"],
 
+    cflags: [
+        "-DKEEP_THREADS_ACTIVE=1",
+    ],
+
     srcs: ["C2SoftAvcDec.cpp"],
 
     export_include_dirs: ["."],
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 3385b95..77fdeb9 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftAvcDec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
 #include <log/log.h>
 
 #include <media/stagefright/foundation/MediaDefs.h>
@@ -416,7 +419,7 @@
     ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
-    s_create_ip.u4_keep_threads_active = 1;
+    s_create_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
     s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
     s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorFormat;
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index 4b189b4..2b59ee3 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -43,7 +43,7 @@
     ],
 
     static_libs: [
-        "libyuv_static", // for conversion routines
+        "libyuv", // for conversion routines
     ],
 
     shared_libs: [
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 06a21f6..aec6523 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -712,6 +712,7 @@
         case kWhatStop: {
             int32_t err = thiz->onStop();
             thiz->mOutputBlockPool.reset();
+            mRunning = false;
             Reply(msg, &err);
             break;
         }
diff --git a/media/codec2/components/cmds/codec2.cpp b/media/codec2/components/cmds/codec2.cpp
index a17b04e..ca65aa2 100644
--- a/media/codec2/components/cmds/codec2.cpp
+++ b/media/codec2/components/cmds/codec2.cpp
@@ -46,7 +46,6 @@
 #include <media/stagefright/Utils.h>
 
 #include <gui/GLConsumer.h>
-#include <gui/IProducerListener.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 
@@ -91,7 +90,7 @@
     std::shared_ptr<Listener> mListener;
     std::shared_ptr<C2Component> mComponent;
 
-    sp<IProducerListener> mProducerListener;
+    sp<SurfaceListener> mSurfaceListener;
 
     std::atomic_int mLinearPoolId;
 
@@ -138,7 +137,7 @@
 
 SimplePlayer::SimplePlayer()
     : mListener(new Listener(this)),
-      mProducerListener(new StubProducerListener),
+      mSurfaceListener(new StubSurfaceListener),
       mLinearPoolId(C2BlockPool::PLATFORM_START),
       mComposerClient(new SurfaceComposerClient) {
     CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
@@ -164,7 +163,7 @@
 
     mSurface = mControl->getSurface();
     CHECK(mSurface != nullptr);
-    mSurface->connect(NATIVE_WINDOW_API_CPU, mProducerListener);
+    mSurface->connect(NATIVE_WINDOW_API_CPU, mSurfaceListener);
 }
 
 SimplePlayer::~SimplePlayer() {
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.cpp b/media/codec2/components/flac/C2SoftFlacEnc.cpp
index 7b63e75..780660e 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.cpp
+++ b/media/codec2/components/flac/C2SoftFlacEnc.cpp
@@ -155,7 +155,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -186,7 +186,7 @@
     mSignalledError = false;
     mSignalledOutputEos = false;
     mIsFirstFrame = true;
-    mAnchorTimeStamp = 0ull;
+    mAnchorTimeStamp = 0;
     mProcessedSamples = 0u;
     mEncoderWriteData = false;
     mEncoderReturnedNbBytes = 0;
@@ -236,7 +236,7 @@
               inSize, (int)work->input.ordinal.timestamp.peeku(),
               (int)work->input.ordinal.frameIndex.peeku(), work->input.flags);
     if (mIsFirstFrame && inSize) {
-        mAnchorTimeStamp = work->input.ordinal.timestamp.peekull();
+        mAnchorTimeStamp = work->input.ordinal.timestamp.peekll();
         mIsFirstFrame = false;
     }
 
@@ -405,7 +405,7 @@
     C2WriteView wView = mOutputBlock->map().get();
     uint8_t* outData = wView.data();
     const uint32_t sampleRate = mIntf->getSampleRate();
-    const uint64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
+    const int64_t outTimeStamp = mProcessedSamples * 1000000ll / sampleRate;
     ALOGV("writing %zu bytes of encoded data on output", bytes);
     // increment mProcessedSamples to maintain audio synchronization during
     // play back
diff --git a/media/codec2/components/flac/C2SoftFlacEnc.h b/media/codec2/components/flac/C2SoftFlacEnc.h
index 1f3be3c..ed9c298 100644
--- a/media/codec2/components/flac/C2SoftFlacEnc.h
+++ b/media/codec2/components/flac/C2SoftFlacEnc.h
@@ -72,7 +72,7 @@
     bool mSignalledOutputEos;
     uint32_t mBlockSize;
     bool mIsFirstFrame;
-    uint64_t mAnchorTimeStamp;
+    int64_t mAnchorTimeStamp;
     uint64_t mProcessedSamples;
     // should the data received by the callback be written to the output port
     bool mEncoderWriteData;
diff --git a/media/codec2/components/gav1/Android.bp b/media/codec2/components/gav1/Android.bp
index 9781b6d..f22490d 100644
--- a/media/codec2/components/gav1/Android.bp
+++ b/media/codec2/components/gav1/Android.bp
@@ -23,7 +23,7 @@
     srcs: ["C2SoftGav1Dec.cpp"],
     static_libs: [
         "libgav1",
-        "libyuv_static",
+        "libyuv",
     ],
 
     apex_available: [
diff --git a/media/codec2/components/hevc/Android.bp b/media/codec2/components/hevc/Android.bp
index d1388b9..cb9c2ae 100644
--- a/media/codec2/components/hevc/Android.bp
+++ b/media/codec2/components/hevc/Android.bp
@@ -15,6 +15,10 @@
         "libcodec2_soft_sanitize_cfi-defaults",
     ],
 
+    cflags: [
+        "-DKEEP_THREADS_ACTIVE=1",
+    ],
+
     srcs: ["C2SoftHevcDec.cpp"],
 
     static_libs: ["libhevcdec"],
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 81db2a1..64aa7a4 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftHevcDec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
 #include <log/log.h>
 
 #include <media/stagefright/foundation/MediaDefs.h>
@@ -407,7 +410,7 @@
     ivdext_create_op_t s_create_op = {};
 
     s_create_ip.s_ivd_create_ip_t.u4_size = sizeof(ivdext_create_ip_t);
-    s_create_ip.u4_keep_threads_active = 1;
+    s_create_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_create_ip.s_ivd_create_ip_t.e_cmd = IVD_CMD_CREATE;
     s_create_ip.s_ivd_create_ip_t.u4_share_disp_buf = 0;
     s_create_ip.s_ivd_create_ip_t.e_output_format = mIvColorformat;
diff --git a/media/codec2/components/mp3/C2SoftMp3Dec.cpp b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
index 149c6ee..aed5e68 100644
--- a/media/codec2/components/mp3/C2SoftMp3Dec.cpp
+++ b/media/codec2/components/mp3/C2SoftMp3Dec.cpp
@@ -114,7 +114,9 @@
 c2_status_t C2SoftMP3::onStop() {
     // Make sure that the next buffer output does not still
     // depend on fragments from the last one decoded.
-    pvmp3_InitDecoder(mConfig, mDecoderBuf);
+    if (mDecoderBuf) {
+        pvmp3_InitDecoder(mConfig, mDecoderBuf);
+    }
     mSignalledError = false;
     mIsFirst = true;
     mSignalledOutputEos = false;
diff --git a/media/codec2/components/mpeg2/Android.bp b/media/codec2/components/mpeg2/Android.bp
index a58044c..e644ee3 100644
--- a/media/codec2/components/mpeg2/Android.bp
+++ b/media/codec2/components/mpeg2/Android.bp
@@ -14,6 +14,10 @@
         "libcodec2_soft_sanitize_signed-defaults",
     ],
 
+    cflags: [
+        "-DKEEP_THREADS_ACTIVE=0",
+    ],
+
     srcs: ["C2SoftMpeg2Dec.cpp"],
 
     static_libs: ["libmpeg2dec"],
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 491098d..562dcf5 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -16,6 +16,9 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "C2SoftMpeg2Dec"
+#ifndef KEEP_THREADS_ACTIVE
+#define KEEP_THREADS_ACTIVE 0
+#endif
 #include <log/log.h>
 
 #include <media/stagefright/foundation/MediaDefs.h>
@@ -433,7 +436,7 @@
 
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.u4_size = sizeof(ivdext_fill_mem_rec_ip_t);
     s_fill_mem_ip.u4_share_disp_buf = 0;
-    s_fill_mem_ip.u4_keep_threads_active = 1;
+    s_fill_mem_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_fill_mem_ip.e_output_format = mIvColorformat;
     s_fill_mem_ip.u4_deinterlace = 1;
     s_fill_mem_ip.s_ivd_fill_mem_rec_ip_t.e_cmd = IV_CMD_FILL_NUM_MEM_REC;
@@ -475,7 +478,7 @@
     s_init_ip.s_ivd_init_ip_t.u4_frm_max_ht = mHeight;
     s_init_ip.u4_share_disp_buf = 0;
     s_init_ip.u4_deinterlace = 1;
-    s_init_ip.u4_keep_threads_active = 1;
+    s_init_ip.u4_keep_threads_active = KEEP_THREADS_ACTIVE;
     s_init_ip.s_ivd_init_ip_t.u4_num_mem_rec = mNumMemRecords;
     s_init_ip.s_ivd_init_ip_t.e_output_format = mIvColorformat;
     s_init_op.s_ivd_init_op_t.u4_size = sizeof(ivdext_init_op_t);
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index 08e2fa6..3e88acd 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -465,6 +465,7 @@
       mTemporalPatternIdx(0),
       mLastTimestamp(0x7FFFFFFFFFFFFFFFull),
       mSignalledOutputEos(false),
+      mHeaderGenerated(false),
       mSignalledError(false) {
     for (int i = 0; i < MAXTEMPORALLAYERS; i++) {
         mTemporalLayerBitrateRatio[i] = 1.0f;
@@ -494,6 +495,7 @@
 
     // this one is not allocated by us
     mCodecInterface = nullptr;
+    mHeaderGenerated = false;
 }
 
 c2_status_t C2SoftVpxEnc::onStop() {
@@ -558,6 +560,7 @@
           (uint32_t)mBitrateControlMode, mTemporalLayers, mIntf->getSyncFramePeriod(),
           mMinQuantizer, mMaxQuantizer);
 
+    mHeaderGenerated = false;
     mCodecConfiguration = new vpx_codec_enc_cfg_t;
     if (!mCodecConfiguration) goto CleanUp;
     codec_return = vpx_codec_enc_config_default(mCodecInterface,
@@ -873,6 +876,27 @@
         return;
     }
 
+    // Header generation is limited to Android V and above, as MediaMuxer did not handle
+    // CSD for VP9 correctly in Android U and before.
+    if (isAtLeastV() && !mHeaderGenerated) {
+        vpx_fixed_buf_t* codec_private_data = vpx_codec_get_global_headers(mCodecContext);
+        if (codec_private_data) {
+            std::unique_ptr<C2StreamInitDataInfo::output> csd =
+                    C2StreamInitDataInfo::output::AllocUnique(codec_private_data->sz, 0u);
+            if (!csd) {
+                ALOGE("CSD allocation failed");
+                mSignalledError = true;
+                work->result = C2_NO_MEMORY;
+                work->workletsProcessed = 1u;
+                return;
+            }
+            memcpy(csd->m.value, codec_private_data->buf, codec_private_data->sz);
+            work->worklets.front()->output.configUpdate.push_back(std::move(csd));
+            ALOGV("CSD Produced of size %zu bytes", codec_private_data->sz);
+        }
+        mHeaderGenerated = true;
+    }
+
     const C2ConstGraphicBlock inBuffer =
         inputBuffer->data().graphicBlocks().front();
     if (inBuffer.width() < mSize->width ||
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index 980de04..87d24f9 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -207,6 +207,9 @@
      // Signalled EOS
      bool mSignalledOutputEos;
 
+     // Header generated
+     bool mHeaderGenerated;
+
      // Signalled Error
      bool mSignalledError;
 
diff --git a/media/codec2/core/Android.bp b/media/codec2/core/Android.bp
index 7d5740b..c205dcd 100644
--- a/media/codec2/core/Android.bp
+++ b/media/codec2/core/Android.bp
@@ -26,9 +26,6 @@
         "//apex_available:platform",
         "com.android.media.swcodec",
     ],
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: ["C2.cpp"],
diff --git a/media/codec2/fuzzer/Android.bp b/media/codec2/fuzzer/Android.bp
index b387b2c..ec77427 100644
--- a/media/codec2/fuzzer/Android.bp
+++ b/media/codec2/fuzzer/Android.bp
@@ -163,7 +163,7 @@
 
     static_libs: [
         "libgav1",
-        "libyuv_static",
+        "libyuv",
         "libcodec2_soft_av1dec_gav1",
     ],
 }
diff --git a/media/codec2/hal/aidl/Android.bp b/media/codec2/hal/aidl/Android.bp
index 48b6e21..e16e2b1 100644
--- a/media/codec2/hal/aidl/Android.bp
+++ b/media/codec2/hal/aidl/Android.bp
@@ -8,6 +8,7 @@
     name: "libcodec2_aidl_client",
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
@@ -65,6 +66,7 @@
     ],
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
diff --git a/media/codec2/hal/aidl/ComponentStore.cpp b/media/codec2/hal/aidl/ComponentStore.cpp
index b95c09e..de9332b 100644
--- a/media/codec2/hal/aidl/ComponentStore.cpp
+++ b/media/codec2/hal/aidl/ComponentStore.cpp
@@ -36,7 +36,7 @@
 #include <ostream>
 #include <sstream>
 
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
 #include <codec2/hidl/plugin/FilterPlugin.h>
 #include <dlfcn.h>
 #include <C2Config.h>
@@ -51,7 +51,7 @@
 namespace c2 {
 namespace utils {
 
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
 using ::android::DefaultFilterPlugin;
 using ::android::FilterWrapper;
 #endif
@@ -144,7 +144,22 @@
     ::android::SetPreferredCodec2ComponentStore(store);
 
     // Retrieve struct descriptors
-    mParamReflector = mStore->getParamReflector();
+    mParamReflectors.push_back(mStore->getParamReflector());
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
+    std::shared_ptr<C2ParamReflector> paramReflector =
+        GetFilterWrapper()->getParamReflector();
+    if (paramReflector != nullptr) {
+        ALOGD("[%s] added param reflector from filter wrapper", mStore->getName().c_str());
+        mParamReflectors.push_back(paramReflector);
+    }
+#endif
+    // MultiAccessUnit reflector helper is allocated once per store.
+    // All components in this store can reuse this reflector helper.
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        std::shared_ptr<C2ReflectorHelper> helper = std::make_shared<C2ReflectorHelper>();
+        mParamReflectors.push_back(helper);
+        mMultiAccessUnitReflector = helper;
+    }
 
     // Retrieve supported parameters from store
     using namespace std::placeholders;
@@ -173,8 +188,7 @@
         std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
         auto it = mStructDescriptors.find(coreIndex);
         if (it == mStructDescriptors.end()) {
-            std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+            std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
             if (!structDesc) {
                 // All supported params must be described
                 res = C2_BAD_INDEX;
@@ -189,7 +203,7 @@
     return mParameterCache;
 }
 
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
 // static
 std::shared_ptr<FilterWrapper> ComponentStore::GetFilterWrapper() {
     constexpr const char kPluginPath[] = "libc2filterplugin.so";
@@ -205,12 +219,20 @@
     if (c2interface == nullptr) {
         return nullptr;
     }
+    // Framework support for Large audio frame feature depends on:
+    // 1. All feature flags enabled on platform
+    // 2. The capability of the implementation to use the same input buffer
+    //    for different C2Work (C2Config::api_feature_t::API_SAME_INPUT_BUFFER)
+    // 3. Implementation does not inherently support C2LargeFrame::output::PARAM_TYPE param.
     if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
         c2_status_t err = C2_OK;
         C2ComponentDomainSetting domain;
         std::vector<std::unique_ptr<C2Param>> heapParams;
-        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
-        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+        C2ApiFeaturesSetting features = (C2Config::api_feature_t)0;
+        err = c2interface->query_vb({&domain, &features}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK
+                && (domain.value == C2Component::DOMAIN_AUDIO)
+                && ((features.value & C2Config::api_feature_t::API_SAME_INPUT_BUFFER) != 0)) {
             std::vector<std::shared_ptr<C2ParamDescriptor>> params;
             bool isComponentSupportsLargeAudioFrame = false;
             c2interface->querySupportedParams_nb(&params);
@@ -221,8 +243,13 @@
                 }
             }
             if (!isComponentSupportsLargeAudioFrame) {
+                // TODO - b/342269852: MultiAccessUnitInterface also needs to take multiple
+                // param reflectors. Currently filters work on video domain only,
+                // and the MultiAccessUnitHelper is only enabled on audio domain;
+                // thus we pass the per-store MultiAccessUnit reflector helper (mMultiAccessUnitReflector).
                 multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
-                        c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+                        c2interface,
+                        mMultiAccessUnitReflector);
             }
         }
     }
@@ -250,7 +277,7 @@
             mStore->createComponent(name, &c2component);
 
     if (status == C2_OK) {
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
         c2component = GetFilterWrapper()->maybeWrapComponent(c2component);
 #endif
         onInterfaceLoaded(c2component->intf());
@@ -284,7 +311,7 @@
     std::shared_ptr<C2ComponentInterface> c2interface;
     c2_status_t res = mStore->createInterface(name, &c2interface);
     if (res == C2_OK) {
-#ifndef __ANDROID_APEX__
+#ifndef __ANDROID_APEX__  // Filters are not supported for APEX modules
         c2interface = GetFilterWrapper()->maybeWrapInterface(c2interface);
 #endif
         onInterfaceLoaded(c2interface);
@@ -347,8 +374,7 @@
         if (item == mStructDescriptors.end()) {
             // not in the cache, and not known to be unsupported, query local reflector
             if (!mUnsupportedStructDescriptors.count(coreIndex)) {
-                std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+                std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
                 if (!structDesc) {
                     mUnsupportedStructDescriptors.emplace(coreIndex);
                 } else {
@@ -401,6 +427,16 @@
     return ScopedAStatus::ok();
 }
 
+std::shared_ptr<C2StructDescriptor> ComponentStore::describe(const C2Param::CoreIndex &index) {
+    for (const std::shared_ptr<C2ParamReflector> &reflector : mParamReflectors) {
+        std::shared_ptr<C2StructDescriptor> desc = reflector->describe(index);
+        if (desc) {
+            return desc;
+        }
+    }
+    return nullptr;
+}
+
 // Called from createComponent() after a successful creation of `component`.
 void ComponentStore::reportComponentBirth(Component* component) {
     ComponentStatus componentStatus;
diff --git a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
index 746e1bf..bb4c596 100644
--- a/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
+++ b/media/codec2/hal/aidl/include/codec2/aidl/ComponentStore.h
@@ -52,6 +52,13 @@
 using ::aidl::android::hardware::media::bufferpool2::IClientManager;
 
 struct ComponentStore : public BnComponentStore {
+    /**
+     * Constructor for ComponentStore.
+     *
+     * IMPORTANT: SetPreferredCodec2ComponentStore() is called in the constructor.
+     * Be careful about the order in which SetPreferredCodec2ComponentStore() and
+     * the ComponentStore() constructor are called.
+     */
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
@@ -118,7 +125,10 @@
 
     c2_status_t mInit;
     std::shared_ptr<C2ComponentStore> mStore;
-    std::shared_ptr<C2ParamReflector> mParamReflector;
+    std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
+
+    // Reflector helper for MultiAccessUnitHelper
+    std::shared_ptr<C2ReflectorHelper> mMultiAccessUnitReflector;
 
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
@@ -135,6 +145,9 @@
     mutable std::mutex mComponentRosterMutex;
     std::map<Component*, ComponentStatus> mComponentRoster;
 
+    // describe from mParamReflectors
+    std::shared_ptr<C2StructDescriptor> describe(const C2Param::CoreIndex &index);
+
     // Called whenever Component is created.
     void reportComponentBirth(Component* component);
     // Called only from the destructor of Component.
diff --git a/media/codec2/hal/client/Android.bp b/media/codec2/hal/client/Android.bp
index af6f4ae..864eeb8 100644
--- a/media/codec2/hal/client/Android.bp
+++ b/media/codec2/hal/client/Android.bp
@@ -33,6 +33,13 @@
         "libcodec2-aidl-client-defaults",
     ],
 
+    // http://b/343951602#comment4 Explicitly set cpp_std to gnu++20.  The
+    // default inherited from libcodec2-impl-defaults sets it to gnu++17 which
+    // causes a segfault when mixing global std::string symbols built with
+    // gnu++17 and gnu++20.  TODO(b/343951602): clean this up after the
+    // libcodec2-impl-defaults opt-in to gnu++17 is removed.
+    cpp_std: "gnu++20",
+
     header_libs: [
         "libcodec2_internal", // private
     ],
diff --git a/media/codec2/hal/client/GraphicBufferAllocator.cpp b/media/codec2/hal/client/GraphicBufferAllocator.cpp
index 8f489ec..6a6da0f 100644
--- a/media/codec2/hal/client/GraphicBufferAllocator.cpp
+++ b/media/codec2/hal/client/GraphicBufferAllocator.cpp
@@ -17,7 +17,6 @@
 #define LOG_TAG "Codec2-GraphicBufferAllocator"
 
 
-#include <gui/IProducerListener.h>
 #include <media/stagefright/foundation/ADebug.h>
 
 #include <codec2/aidl/GraphicBufferAllocator.h>
@@ -25,25 +24,6 @@
 
 namespace aidl::android::hardware::media::c2::implementation {
 
-class OnBufferReleasedListener : public ::android::BnProducerListener {
-private:
-    uint32_t mGeneration;
-    std::weak_ptr<GraphicBufferAllocator> mAllocator;
-public:
-    OnBufferReleasedListener(
-            uint32_t generation,
-            const std::shared_ptr<GraphicBufferAllocator> &allocator)
-            : mGeneration(generation), mAllocator(allocator) {}
-    virtual ~OnBufferReleasedListener() = default;
-    virtual void onBufferReleased() {
-        auto p = mAllocator.lock();
-        if (p) {
-            p->onBufferReleased(mGeneration);
-        }
-    }
-    virtual bool needsReleaseNotify() { return true; }
-};
-
 ::ndk::ScopedAStatus GraphicBufferAllocator::allocate(
         const IGraphicBufferAllocator::Description& in_desc,
         IGraphicBufferAllocator::Allocation* _aidl_return) {
@@ -108,15 +88,14 @@
     mGraphicsTracker->stop();
 }
 
-const ::android::sp<::android::IProducerListener> GraphicBufferAllocator::createReleaseListener(
-      uint32_t generation) {
-    return new OnBufferReleasedListener(generation, ref<GraphicBufferAllocator>());
-}
-
 void GraphicBufferAllocator::onBufferReleased(uint32_t generation) {
     mGraphicsTracker->onReleased(generation);
 }
 
+void GraphicBufferAllocator::onBufferAttached(uint32_t generation) {
+    mGraphicsTracker->onAttached(generation);
+}
+
 c2_status_t GraphicBufferAllocator::allocate(
         uint32_t width, uint32_t height, ::android::PixelFormat format, uint64_t usage,
         AHardwareBuffer **buf, ::android::sp<::android::Fence> *fence) {
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index 1c2a0fb..bdfc409 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -34,7 +34,7 @@
 
 c2_status_t retrieveAHardwareBufferId(const C2ConstGraphicBlock &blk, uint64_t *bid) {
     std::shared_ptr<const _C2BlockPoolData> bpData = _C2BlockFactory::GetGraphicBlockPoolData(blk);
-    if (bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
+    if (!bpData || bpData->getType() != _C2BlockPoolData::TYPE_AHWBUFFER) {
         return C2_BAD_VALUE;
     }
     if (__builtin_available(android __ANDROID_API_T__, *)) {
@@ -173,7 +173,7 @@
 }
 
 GraphicsTracker::GraphicsTracker(int maxDequeueCount)
-    : mBufferCache(new BufferCache()), mMaxDequeue{maxDequeueCount},
+    : mBufferCache(new BufferCache()), mNumDequeueing{0}, mMaxDequeue{maxDequeueCount},
     mMaxDequeueCommitted{maxDequeueCount},
     mDequeueable{maxDequeueCount},
     mTotalDequeued{0}, mTotalCancelled{0}, mTotalDropped{0}, mTotalReleased{0},
@@ -235,6 +235,7 @@
         const sp<IGraphicBufferProducer>& igbp, uint32_t generation) {
     // TODO: wait until operations to previous IGBP is completed.
     std::shared_ptr<BufferCache> prevCache;
+    int prevDequeueRequested = 0;
     int prevDequeueCommitted;
 
     std::unique_lock<std::mutex> cl(mConfigLock);
@@ -243,6 +244,9 @@
         mInConfig = true;
         prevCache = mBufferCache;
         prevDequeueCommitted = mMaxDequeueCommitted;
+        if (mMaxDequeueRequested.has_value()) {
+            prevDequeueRequested = mMaxDequeueRequested.value();
+        }
     }
     // NOTE: Switching to the same surface is blocked from MediaCodec.
     // Switching to the same surface might not work if tried, since disconnect()
@@ -263,6 +267,11 @@
         mInConfig = false;
         return C2_BAD_VALUE;
     }
+    ALOGD("new surface in configuration: maxDequeueRequested(%d), maxDequeueCommitted(%d)",
+          prevDequeueRequested, prevDequeueCommitted);
+    if (prevDequeueRequested > 0 && prevDequeueRequested > prevDequeueCommitted) {
+        prevDequeueCommitted = prevDequeueRequested;
+    }
     if (igbp) {
         ret = igbp->setMaxDequeuedBufferCount(prevDequeueCommitted);
         if (ret != ::android::OK) {
@@ -280,6 +289,34 @@
         std::unique_lock<std::mutex> l(mLock);
         mInConfig = false;
         mBufferCache = newCache;
+        // {@code dequeued} is the number of currently dequeued buffers.
+        // {@code prevDequeueCommitted} is the max dequeued buffer count at any moment
+        //  for the new surface.
+        // {@code newDequeueable} is hence the current # of dequeueable buffers
+        //  if no change occurs.
+        int dequeued = mDequeued.size() + mNumDequeueing;
+        int newDequeueable = prevDequeueCommitted - dequeued;
+        if (newDequeueable < 0) {
+            // This should not happen.
+            // But if it does, we respect the value and try to continue.
+            ALOGE("calculated new dequeueable is negative: %d max(%d),dequeued(%d)",
+                  newDequeueable, prevDequeueCommitted, dequeued);
+        }
+
+        if (mMaxDequeueRequested.has_value() && mMaxDequeueRequested == prevDequeueCommitted) {
+            mMaxDequeueRequested.reset();
+        }
+        mMaxDequeue = mMaxDequeueCommitted = prevDequeueCommitted;
+
+        int delta = newDequeueable - mDequeueable;
+        if (delta > 0) {
+            writeIncDequeueableLocked(delta);
+        } else if (delta < 0) {
+            drainDequeueableLocked(-delta);
+        }
+        ALOGV("new surfcace dequeueable %d(delta %d), maxDequeue %d",
+              newDequeueable, delta, mMaxDequeue);
+        mDequeueable = newDequeueable;
     }
     return C2_OK;
 }
@@ -529,6 +566,7 @@
             ALOGE("writing end for the waitable object seems to be closed");
             return C2_BAD_STATE;
         }
+        mNumDequeueing++;
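+        // Track this dequeue as in-flight; the matching commit path decrements mNumDequeueing.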
         mDequeueable--;
         *cache = mBufferCache;
         return C2_OK;
@@ -543,6 +581,7 @@
                     bool cached, int slot, const sp<Fence> &fence,
                     std::shared_ptr<BufferItem> *pBuffer, bool *updateDequeue) {
     std::unique_lock<std::mutex> l(mLock);
+    mNumDequeueing--;
     if (res == C2_OK) {
         if (cached) {
             auto it = cache->mBuffers.find(slot);
@@ -625,16 +664,24 @@
 
     int slotId;
     uint64_t outBufferAge;
-    ::android::FrameEventHistoryDelta outTimestamps;
     sp<Fence> fence;
 
     ::android::status_t status = igbp->dequeueBuffer(
-            &slotId, &fence, width, height, format, usage, &outBufferAge, &outTimestamps);
+            &slotId, &fence, width, height, format, usage, &outBufferAge, nullptr);
     if (status < ::android::OK) {
         if (status == ::android::TIMED_OUT || status == ::android::WOULD_BLOCK) {
             ALOGW("BQ might not be ready for dequeueBuffer()");
             return C2_BLOCKING;
         }
+        bool cacheExpired = false;
+        {
+            std::unique_lock<std::mutex> l(mLock);
+            cacheExpired = (mBufferCache.get() != cache.get());
+        }
+        if (cacheExpired) {
+            ALOGW("a new BQ is configured. dequeueBuffer() error %d", (int)status);
+            return C2_BLOCKING;
+        }
         ALOGE("BQ in inconsistent status. dequeueBuffer() error %d", (int)status);
         return C2_CORRUPTED;
     }
@@ -655,7 +702,8 @@
             ALOGE("allocate by dequeueBuffer() successful, but requestBuffer() failed %d",
                   status);
             igbp->cancelBuffer(slotId, fence);
-            return C2_CORRUPTED;
+            // This might be due to life-cycle end and/or surface switching.
+            return C2_BLOCKING;
         }
         *buffer = std::make_shared<BufferItem>(generation, slotId, realloced, fence);
         if (!*buffer) {
@@ -776,6 +824,10 @@
     std::shared_ptr<BufferCache> cache;
     int slotId;
     sp<Fence> rFence;
+    if (mStopped.load() == true) {
+        ALOGE("cannot deallocate due to being stopped");
+        return C2_BAD_STATE;
+    }
     c2_status_t res = requestDeallocate(bid, fence, &completed, &updateDequeue,
                                         &cache, &slotId, &rFence);
     if (res != C2_OK) {
@@ -852,7 +904,10 @@
         cache->unblockSlot(buffer->mSlot);
         if (oldBuffer) {
             // migrated, register the new buffer to the cache.
-            cache->mBuffers.emplace(buffer->mSlot, buffer);
+            auto ret = cache->mBuffers.emplace(buffer->mSlot, buffer);
+            if (!ret.second) {
+                ret.first->second = buffer;
+            }
         }
     }
     mDeallocating.erase(origBid);
@@ -953,6 +1008,11 @@
     {
         std::unique_lock<std::mutex> l(mLock);
         if (mBufferCache->mGeneration == generation) {
+            if (mBufferCache->mNumAttached > 0) {
+                ALOGV("one onReleased() ignored for each prior onAttached().");
+                mBufferCache->mNumAttached--;
+                return;
+            }
             if (!adjustDequeueConfLocked(&updateDequeue)) {
                 mDequeueable++;
                 writeIncDequeueableLocked(1);
@@ -964,4 +1024,12 @@
     }
 }
 
+void GraphicsTracker::onAttached(uint32_t generation) {
+    std::unique_lock<std::mutex> l(mLock);
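+    // Count attached buffers so that onReleased() can skip one dequeueable increment per attach.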
+    if (mBufferCache->mGeneration == generation) {
+        ALOGV("buffer attached");
+        mBufferCache->mNumAttached++;
+    }
+}
+
 } // namespace aidl::android::hardware::media::c2::implementation
diff --git a/media/codec2/hal/client/client.cpp b/media/codec2/hal/client/client.cpp
index 1d2794e..80735cb 100644
--- a/media/codec2/hal/client/client.cpp
+++ b/media/codec2/hal/client/client.cpp
@@ -2391,7 +2391,12 @@
                        "GraphicBufferAllocator was not created.";
             return C2_CORRUPTED;
         }
+        // Note: Consumer usage is set before the HAL allocator (gba) is set.
+        // This is the same behavior as with HIDL.
+        uint64_t consumerUsage = configConsumerUsage(surface);
         bool ret = gba->configure(surface, generation, maxDequeueCount);
+        ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx",
+              generation, (long long)consumerUsage);
         return ret ? C2_OK : C2_CORRUPTED;
     }
     uint64_t bqId = 0;
@@ -2419,41 +2424,9 @@
                                       mHidlBase1_2 ? &syncObj : nullptr);
     }
 
-    // set consumer bits
-    // TODO: should this get incorporated into setOutputSurface method so that consumer bits
-    // can be set atomically?
-    uint64_t consumerUsage = kDefaultConsumerUsage;
-    {
-        if (surface) {
-            uint64_t usage = 0;
-            status_t err = surface->getConsumerUsage(&usage);
-            if (err != NO_ERROR) {
-                ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
-                        err, asString(err));
-            } else {
-                // Note: we are adding the default usage because components must support
-                // producing output frames that can be displayed an all output surfaces.
-
-                // TODO: do not set usage for tunneled scenario. It is unclear if consumer usage
-                // is meaningful in a tunneled scenario; on one hand output buffers exist, but
-                // they do not exist inside of C2 scope. Any buffer usage shall be communicated
-                // through the sideband channel.
-
-                consumerUsage = usage | kDefaultConsumerUsage;
-            }
-        }
-
-        C2StreamUsageTuning::output outputUsage{
-                0u, C2AndroidMemoryUsage::FromGrallocUsage(consumerUsage).expected};
-        std::vector<std::unique_ptr<C2SettingResult>> failures;
-        c2_status_t err = config({&outputUsage}, C2_MAY_BLOCK, &failures);
-        if (err != C2_OK) {
-            ALOGD("setOutputSurface -- failed to set consumer usage (%d/%s)",
-                    err, asString(err));
-        }
-    }
+    uint64_t consumerUsage = configConsumerUsage(surface);
     ALOGD("setOutputSurface -- generation=%u consumer usage=%#llx%s",
-            generation, (long long)consumerUsage, syncObj ? " sync" : "");
+          generation, (long long)consumerUsage, syncObj ? " sync" : "");
 
     Return<c2_hidl::Status> transStatus = syncObj ?
             mHidlBase1_2->setOutputSurfaceWithSyncObj(
@@ -2495,6 +2468,44 @@
     return mOutputBufferQueue->outputBuffer(block, input, output);
 }
 
+uint64_t Codec2Client::Component::configConsumerUsage(
+        const sp<IGraphicBufferProducer>& surface) {
+    // set consumer bits
+    // TODO: should this get incorporated into setOutputSurface method so that consumer bits
+    // can be set atomically?
+    uint64_t consumerUsage = kDefaultConsumerUsage;
+    {
+        if (surface) {
+            uint64_t usage = 0;
+            status_t err = surface->getConsumerUsage(&usage);
+            if (err != NO_ERROR) {
+                ALOGD("setOutputSurface -- failed to get consumer usage bits (%d/%s). ignoring",
+                        err, asString(err));
+            } else {
+                // Note: we are adding the default usage because components must support
+                // producing output frames that can be displayed on all output surfaces.
+
+                // TODO: do not set usage for tunneled scenario. It is unclear if consumer usage
+                // is meaningful in a tunneled scenario; on one hand output buffers exist, but
+                // they do not exist inside of C2 scope. Any buffer usage shall be communicated
+                // through the sideband channel.
+
+                consumerUsage = usage | kDefaultConsumerUsage;
+            }
+        }
+
+        C2StreamUsageTuning::output outputUsage{
+                0u, C2AndroidMemoryUsage::FromGrallocUsage(consumerUsage).expected};
+        std::vector<std::unique_ptr<C2SettingResult>> failures;
+        c2_status_t err = config({&outputUsage}, C2_MAY_BLOCK, &failures);
+        if (err != C2_OK) {
+            ALOGD("setOutputSurface -- failed to set consumer usage (%d/%s)",
+                    err, asString(err));
+        }
+    }
+    return consumerUsage;
+}
+
 void Codec2Client::Component::pollForRenderedFrames(FrameEventHistoryDelta* delta) {
     if (mAidlBase) {
         // TODO b/311348680
@@ -2556,6 +2567,19 @@
     mOutputBufferQueue->onBufferReleased(generation);
 }
 
+void Codec2Client::Component::onBufferAttachedToOutputSurface(
+        uint32_t generation) {
+    if (mAidlBase) {
+        std::shared_ptr<AidlGraphicBufferAllocator> gba =
+                mGraphicBufferAllocators->current();
+        if (gba) {
+            gba->onBufferAttached(generation);
+        }
+        return;
+    }
+    mOutputBufferQueue->onBufferAttached(generation);
+}
+
 void Codec2Client::Component::holdIgbaBlocks(
         const std::list<std::unique_ptr<C2Work>>& workList) {
     if (!mAidlBase) {
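
configConsumerUsage() above factors out the existing policy: query the surface's consumer usage bits, fall back to the default on failure, and always OR in kDefaultConsumerUsage so output frames remain displayable on any surface. A self-contained sketch of that merge policy (the constant value and the optional-based query result are illustrative stand-ins, not the actual client code):

    #include <cstdint>
    #include <optional>

    // Stand-in for the platform default consumer usage bits (illustrative value only).
    static constexpr uint64_t kDefaultConsumerUsage = 0x900;

    // queriedUsage is empty when the surface's getConsumerUsage() call failed.
    uint64_t mergeConsumerUsage(std::optional<uint64_t> queriedUsage) {
        // The default bits are always kept, even when the query succeeds,
        // so output frames stay displayable on any surface.
        if (!queriedUsage.has_value()) {
            return kDefaultConsumerUsage;
        }
        return *queriedUsage | kDefaultConsumerUsage;
    }
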
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
index 902c53f..a797cb7 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicBufferAllocator.h
@@ -71,18 +71,6 @@
     void reset();
 
     /**
-     * Create a listener for buffer being released.
-     *
-     * Surface will register this listener and notify whenever the consumer
-     * releases a buffer.
-     *
-     * @param   generation        generation # for the BufferQueue.
-     * @return  IProducerListener can be used when connect# to Surface.
-     */
-    const ::android::sp<::android::IProducerListener> createReleaseListener(
-            uint32_t generation);
-
-    /**
      * Notifies a buffer being released.
      *
      * @param   generation        generation # for the BufferQueue.
@@ -90,6 +78,13 @@
     void onBufferReleased(uint32_t generation);
 
     /**
+     * Notifies a buffer being attached to the consumer.
+     *
+     * @param   generation        generation # for the BufferQueue.
+     */
+    void onBufferAttached(uint32_t generation);
+
+    /**
      * Allocates a buffer.
      *
      * @param   width             width of the requested buffer.
diff --git a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
index dd6c869..9a4fa12 100644
--- a/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
+++ b/media/codec2/hal/client/include/codec2/aidl/GraphicsTracker.h
@@ -143,6 +143,18 @@
     void onReleased(uint32_t generation);
 
     /**
+     * Notifies when a buffer is attached on the graphics (consumer) side.
+     * If the generation does not match the current one, notifications via this
+     * interface are ignored. (In that case, the notifications come from an old
+     * surface which is no longer in use.)
+     * For each onAttached() call, one subsequent onReleased() with the same
+     * generation should be ignored.
+     *
+     * @param[in] generation    generation id for specifying Graphics(BQ)
+     */
+    void onAttached(uint32_t generation);
+
+    /**
      * Get waitable fd for events.(allocate is ready, end of life cycle)
      *
      * @param[out]  pipeFd      a file descriptor created from pipe2()
@@ -217,9 +229,11 @@
 
         BlockedSlot mBlockedSlots[kNumSlots];
 
-        BufferCache() : mBqId{0ULL}, mGeneration{0}, mIgbp{nullptr} {}
+        std::atomic<int> mNumAttached;
+
+        BufferCache() : mBqId{0ULL}, mGeneration{0}, mIgbp{nullptr}, mNumAttached{0} {}
         BufferCache(uint64_t bqId, uint32_t generation, const sp<IGraphicBufferProducer>& igbp) :
-            mBqId{bqId}, mGeneration{generation}, mIgbp{igbp} {}
+            mBqId{bqId}, mGeneration{generation}, mIgbp{igbp}, mNumAttached{0} {}
 
         ~BufferCache();
 
@@ -234,6 +248,7 @@
     // Maps bufferId to buffer
     std::map<uint64_t, std::shared_ptr<BufferItem>> mDequeued;
     std::set<uint64_t> mDeallocating;
+    int mNumDequeueing;
 
     // These member variables are read and modified accessed as follows.
     // 1. mConfigLock being held
diff --git a/media/codec2/hal/client/include/codec2/hidl/client.h b/media/codec2/hal/client/include/codec2/hidl/client.h
index 5c75a47..7923f04 100644
--- a/media/codec2/hal/client/include/codec2/hidl/client.h
+++ b/media/codec2/hal/client/include/codec2/hidl/client.h
@@ -467,6 +467,9 @@
             const QueueBufferInput& input,
             QueueBufferOutput* output);
 
+    // Configure consumer usage and return the effective usage bits.
+    uint64_t configConsumerUsage(const sp<IGraphicBufferProducer>& surface);
+
     // Retrieve frame event history from the output surface.
     void pollForRenderedFrames(FrameEventHistoryDelta* delta);
 
@@ -481,6 +484,10 @@
     void onBufferReleasedFromOutputSurface(
             uint32_t generation);
 
+    // Notify that a buffer is attached to the output surface.
+    void onBufferAttachedToOutputSurface(
+            uint32_t generation);
+
     // When the client received \p workList and the blocks inside
     // \p workList are IGBA based graphic blocks, specify the owner
     // as the current IGBA for the future operations.
diff --git a/media/codec2/hal/client/include/codec2/hidl/output.h b/media/codec2/hal/client/include/codec2/hidl/output.h
index fda34a8..108f0a6 100644
--- a/media/codec2/hal/client/include/codec2/hidl/output.h
+++ b/media/codec2/hal/client/include/codec2/hidl/output.h
@@ -69,6 +69,9 @@
     // update the number of dequeueable/allocatable buffers.
     void onBufferReleased(uint32_t generation);
 
+    // Notify that a buffer is attached to the output surface.
+    void onBufferAttached(uint32_t generation);
+
     // Retrieve frame event history from the output surface.
     void pollForRenderedFrames(FrameEventHistoryDelta* delta);
 
@@ -93,6 +96,7 @@
     uint64_t mBqId;
     int32_t mMaxDequeueBufferCount;
     std::shared_ptr<int> mOwner;
+    std::shared_ptr<int> mConsumerAttachCount;
     // To migrate existing buffers
     sp<GraphicBuffer> mBuffers[BufferQueueDefs::NUM_BUFFER_SLOTS]; // find a better way
     std::weak_ptr<_C2BlockPoolData> mPoolDatas[BufferQueueDefs::NUM_BUFFER_SLOTS];
diff --git a/media/codec2/hal/client/output.cpp b/media/codec2/hal/client/output.cpp
index 36322f5..2eb381b 100644
--- a/media/codec2/hal/client/output.cpp
+++ b/media/codec2/hal/client/output.cpp
@@ -261,6 +261,7 @@
         mGeneration = generation;
         mBqId = bqId;
         mOwner = std::make_shared<int>(0);
+        mConsumerAttachCount = std::make_shared<int>(0);
         mMaxDequeueBufferCount = maxDequeueBufferCount;
         if (igbp == nullptr) {
             return false;
@@ -522,6 +523,7 @@
     std::shared_ptr<C2SurfaceSyncMemory> syncMem;
     sp<IGraphicBufferProducer> outputIgbp;
     uint32_t outputGeneration = 0;
+    std::shared_ptr<int> consumerAttachCount;
     {
         std::unique_lock<std::mutex> l(mMutex);
         if (mStopped) {
@@ -529,6 +531,7 @@
         }
         outputIgbp = mIgbp;
         outputGeneration = mGeneration;
+        consumerAttachCount = mConsumerAttachCount;
         syncMem = mSyncMem;
     }
 
@@ -536,7 +539,39 @@
         auto syncVar = syncMem ? syncMem->mem() : nullptr;
         if (syncVar) {
             syncVar->lock();
-            syncVar->notifyQueuedLocked();
+            if (consumerAttachCount && *consumerAttachCount > 0) {
+                (*consumerAttachCount)--;
+            } else {
+                syncVar->notifyQueuedLocked();
+            }
+            syncVar->unlock();
+        }
+    }
+}
+
+void OutputBufferQueue::onBufferAttached(uint32_t generation) {
+    std::shared_ptr<C2SurfaceSyncMemory> syncMem;
+    sp<IGraphicBufferProducer> outputIgbp;
+    uint32_t outputGeneration = 0;
+    std::shared_ptr<int> consumerAttachCount;
+    {
+        std::unique_lock<std::mutex> l(mMutex);
+        if (mStopped) {
+            return;
+        }
+        outputIgbp = mIgbp;
+        outputGeneration = mGeneration;
+        consumerAttachCount = mConsumerAttachCount;
+        syncMem = mSyncMem;
+    }
+
+    if (outputIgbp && generation == outputGeneration) {
+        auto syncVar = syncMem ? syncMem->mem() : nullptr;
+        if (syncVar) {
+            syncVar->lock();
+            if (consumerAttachCount) {
+                (*consumerAttachCount)++;
+            }
             syncVar->unlock();
         }
     }
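
In output.cpp above, onBufferAttached() bumps a shared attach count and onBufferReleased() consumes it before notifying the sync variable, so a release that merely balances a consumer-side attach does not advertise a free slot. A hedged sketch of that rule, with the C2SurfaceSyncMemory machinery replaced by a plain mutex and condition variable:

    #include <condition_variable>
    #include <mutex>

    // Hypothetical sketch of the "attach cancels the next release" rule used by
    // OutputBufferQueue; SyncVar/notifyQueuedLocked are stand-ins here.
    struct AttachAwareQueue {
        std::mutex lock;
        std::condition_variable cond;
        int consumerAttachCount = 0;
        int queuedCount = 0;

        void onBufferAttached() {
            std::lock_guard<std::mutex> l(lock);
            ++consumerAttachCount;              // remember the attach
        }

        void onBufferReleased() {
            std::lock_guard<std::mutex> l(lock);
            if (consumerAttachCount > 0) {
                --consumerAttachCount;          // this release balances an attach
                return;                         // do not signal a new free slot
            }
            ++queuedCount;
            cond.notify_one();                  // genuine release: wake a waiter
        }
    };
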
diff --git a/media/codec2/hal/common/Android.bp b/media/codec2/hal/common/Android.bp
index 7d7b285..0638363 100644
--- a/media/codec2/hal/common/Android.bp
+++ b/media/codec2/hal/common/Android.bp
@@ -28,8 +28,8 @@
         "liblog",
         "libstagefright_foundation",
         "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
     ],
-
     static_libs: ["aconfig_mediacodec_flags_c_lib"],
 }
 
@@ -53,6 +53,7 @@
     shared_libs: [
         "libbase",
         "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
     ],
 
     static_libs: ["aconfig_mediacodec_flags_c_lib"],
@@ -67,5 +68,6 @@
     shared_libs: [
         "libbase",
         "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
     ],
 }
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index 8086ef2..b287b91 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -27,6 +27,7 @@
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
+static inline constexpr uint32_t MAX_SUPPORTED_SIZE = (10 * 512000 * 8 * 2u);
 namespace android {
 
 static C2R MultiAccessUnitParamsSetter(
@@ -39,8 +40,6 @@
         res = res.plus(C2SettingResultBuilder::BadValue(me.F(me.v.thresholdSize)));
     } else if (me.v.maxSize < me.v.thresholdSize) {
         me.set().maxSize = me.v.thresholdSize;
-    } else if (me.v.thresholdSize == 0 && me.v.maxSize > 0) {
-        me.set().thresholdSize = me.v.maxSize;
     }
     std::vector<std::unique_ptr<C2SettingResult>> failures;
     res.retrieveFailures(&failures);
@@ -61,9 +60,9 @@
             .withDefault(new C2LargeFrame::output(0u, 0, 0))
             .withFields({
                 C2F(mLargeFrameParams, maxSize).inRange(
-                        0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u)),
+                        0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE)),
                 C2F(mLargeFrameParams, thresholdSize).inRange(
-                        0, c2_min(UINT_MAX, 10 * 512000 * 8 * 2u))
+                        0, c2_min(UINT_MAX, MAX_SUPPORTED_SIZE))
             })
             .withSetter(MultiAccessUnitParamsSetter)
             .build());
@@ -115,6 +114,18 @@
     return false;
 }
 
+bool MultiAccessUnitInterface::getMaxInputSize(
+        C2StreamMaxBufferSizeInfo::input* const maxInputSize) const {
+    if (maxInputSize == nullptr || mC2ComponentIntf == nullptr) {
+        return false;
+    }
+    c2_status_t err = mC2ComponentIntf->query_vb({maxInputSize}, {}, C2_MAY_BLOCK, nullptr);
+    if (err != OK) {
+        return false;
+    }
+    return true;
+}
+
 //C2MultiAccessUnitBuffer
 class C2MultiAccessUnitBuffer : public C2Buffer {
     public:
@@ -128,6 +139,7 @@
 MultiAccessUnitHelper::MultiAccessUnitHelper(
         const std::shared_ptr<MultiAccessUnitInterface>& intf,
         std::shared_ptr<C2BlockPool>& linearPool):
+        mMultiAccessOnOffAllowed(true),
         mInit(false),
         mInterface(intf),
         mLinearPool(linearPool) {
@@ -152,6 +164,63 @@
     return result;
 }
 
+bool MultiAccessUnitHelper::tryReconfigure(const std::unique_ptr<C2Param> &param) {
+    C2LargeFrame::output *lfp = C2LargeFrame::output::From(param.get());
+    if (lfp == nullptr) {
+        return false;
+    }
+    bool isDecoder = (mInterface->kind() == C2Component::KIND_DECODER);
+    if (!isDecoder) {
+        C2StreamMaxBufferSizeInfo::input maxInputSize(0);
+        if (!mInterface->getMaxInputSize(&maxInputSize)) {
+            LOG(ERROR) << "Error in reconfigure: "
+                    << "Encoder failed to respond with a valid max input size";
+            return false;
+        }
+        // This assumes a worst-case compression ratio of 1:1:
+        // the encoder should never produce more output than the input
+        // provided to it in a single call.
+        if (lfp->maxSize < maxInputSize.value) {
+            lfp->maxSize = maxInputSize.value;
+        }
+    }
+    lfp->maxSize =
+            (lfp->maxSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+                    (lfp->maxSize < 0) ? 0 : lfp->maxSize;
+    lfp->thresholdSize =
+            (lfp->thresholdSize > MAX_SUPPORTED_SIZE) ? MAX_SUPPORTED_SIZE :
+                    (lfp->thresholdSize < 0) ? 0 : lfp->thresholdSize;
+    C2LargeFrame::output currentConfig = mInterface->getLargeFrameParam();
+    if ((currentConfig.maxSize == lfp->maxSize)
+            && (currentConfig.thresholdSize == lfp->thresholdSize)) {
+        // no need to update
+        return false;
+    }
+    if (isDecoder) {
+        bool isOnOffTransition =
+                (currentConfig.maxSize == 0 && lfp->maxSize != 0)
+                || (currentConfig.maxSize != 0 && lfp->maxSize == 0);
+            if (isOnOffTransition && !mMultiAccessOnOffAllowed) {
+                LOG(ERROR) << "Setting new configs not allowed"
+                        << " MaxSize: " << lfp->maxSize
+                        << " ThresholdSize: " << lfp->thresholdSize;
+                return false;
+            }
+    }
+    std::vector<C2Param*> config{lfp};
+    std::vector<std::unique_ptr<C2SettingResult>> failures;
+    if (C2_OK != mInterface->config(config, C2_MAY_BLOCK, &failures)) {
+        LOG(ERROR) << "Dynamic config not applied for"
+                << " MaxSize: " << lfp->maxSize
+                << " ThresholdSize: " << lfp->thresholdSize;
+        return false;
+    }
+    LOG(DEBUG) << "Updated from param maxSize "
+            << lfp->maxSize
+            << " ThresholdSize " << lfp->thresholdSize;
+    return true;
+}
+
 std::shared_ptr<MultiAccessUnitInterface> MultiAccessUnitHelper::getInterface() {
     return mInterface;
 }
@@ -163,6 +232,7 @@
 void MultiAccessUnitHelper::reset() {
     std::lock_guard<std::mutex> l(mLock);
     mFrameHolder.clear();
+    mMultiAccessOnOffAllowed = true;
 }
 
 c2_status_t MultiAccessUnitHelper::error(
@@ -181,6 +251,7 @@
         }
     }
     mFrameHolder.clear();
+    mMultiAccessOnOffAllowed = true;
     return C2_OK;
 }
 
@@ -232,16 +303,23 @@
         uint64_t newFrameIdx = mFrameIndex++;
         // TODO: Do not split buffers if component inherantly supports MultipleFrames.
         // if thats case, only replace frameindex.
-        auto cloneInputWork = [&newFrameIdx](std::unique_ptr<C2Work>& inWork, uint32_t flags) {
+        auto cloneInputWork = [&frameInfo, &newFrameIdx, this]
+                (std::unique_ptr<C2Work>& inWork, uint32_t flags) -> std::unique_ptr<C2Work> {
             std::unique_ptr<C2Work> newWork(new C2Work);
             newWork->input.flags = (C2FrameData::flags_t)flags;
             newWork->input.ordinal = inWork->input.ordinal;
             newWork->input.ordinal.frameIndex = newFrameIdx;
             if (!inWork->input.configUpdate.empty()) {
                 for (std::unique_ptr<C2Param>& param : inWork->input.configUpdate) {
-                    newWork->input.configUpdate.push_back(
-                            std::move(C2Param::Copy(*(param.get()))));
+                    if (param->index() == C2LargeFrame::output::PARAM_TYPE) {
+                        if (tryReconfigure(param)) {
+                            frameInfo.mConfigUpdate.push_back(std::move(param));
+                        }
+                    } else {
+                        newWork->input.configUpdate.push_back(std::move(param));
+                    }
                 }
+                inWork->input.configUpdate.clear();
             }
             newWork->input.infoBuffers = (inWork->input.infoBuffers);
             if (!inWork->worklets.empty() && inWork->worklets.front() != nullptr) {
@@ -249,10 +327,10 @@
                 newWork->worklets.front()->component = inWork->worklets.front()->component;
                 std::vector<std::unique_ptr<C2Tuning>> tunings;
                 for (std::unique_ptr<C2Tuning>& tuning : inWork->worklets.front()->tunings) {
-                    tunings.push_back(std::move(
+                    tunings.push_back(
                             std::unique_ptr<C2Tuning>(
                                     static_cast<C2Tuning*>(
-                                            C2Param::Copy(*(tuning.get())).release()))));
+                                            C2Param::Copy(*(tuning.get())).release())));
                 }
                 newWork->worklets.front()->tunings = std::move(tunings);
             }
@@ -266,7 +344,7 @@
                     << inputOrdinal.frameIndex.peekull()
                     << ") -> newFrameIndex " << newFrameIdx
                     <<" : input ts " << inputOrdinal.timestamp.peekull();
-            sliceWork.push_back(std::move(cloneInputWork(w, w->input.flags)));
+            sliceWork.push_back(cloneInputWork(w, w->input.flags));
             if (!w->input.buffers.empty() && w->input.buffers.front() != nullptr) {
                 sliceWork.back()->input.buffers = std::move(w->input.buffers);
             }
@@ -331,6 +409,7 @@
             frameInfo.mLargeFrameTuning = multiAccessParams;
             std::lock_guard<std::mutex> l(mLock);
             mFrameHolder.push_back(std::move(frameInfo));
+            mMultiAccessOnOffAllowed = false;
         }
     }
     return C2_OK;
@@ -360,6 +439,7 @@
             std::list<MultiAccessUnitInfo>::iterator frame =
                     mFrameHolder.begin();
             while (!foundFrame && frame != mFrameHolder.end()) {
+                c2_status_t res = C2_OK;
                 auto it = frame->mComponentFrameIds.find(thisFrameIndex);
                 if (it != frame->mComponentFrameIds.end()) {
                     foundFrame = true;
@@ -369,8 +449,7 @@
                     if (work->result != C2_OK
                             || work->worklets.empty()
                             || !work->worklets.front()
-                            || (frame->mLargeFrameTuning.thresholdSize == 0
-                            || frame->mLargeFrameTuning.maxSize == 0)) {
+                            || frame->mLargeFrameTuning.maxSize == 0) {
                         if (removeEntry) {
                             frame->mComponentFrameIds.erase(it);
                             removeEntry = false;
@@ -388,10 +467,27 @@
                         addOutWork(frame->mLargeWork);
                         frame->reset();
                         if (workResult != C2_OK) {
-                            frame->mAccessUnitInfos.clear();
+                            frame->mComponentFrameIds.clear();
+                            removeEntry = false;
                         }
-                    } else if (C2_OK != processWorklets(*frame, work, addOutWork)) {
-                        LOG(DEBUG) << "Error while processing work";
+                    } else if (C2_OK != (res = processWorklets(*frame, work, addOutWork))) {
+                        // Upon an error while processing worklets, return the work with
+                        // its result set to the error so that the framework is notified
+                        // and can take whatever action is necessary to handle it.
+                        LOG(DEBUG) << "Error while processing worklets";
+                        if (frame->mLargeWork == nullptr) {
+                            frame->mLargeWork.reset(new C2Work);
+                            frame->mLargeWork->input.ordinal = frame->inOrdinal;
+                            frame->mLargeWork->input.ordinal.frameIndex =
+                                    frame->inOrdinal.frameIndex;
+                        }
+                        frame->mLargeWork->result = res;
+                        finalizeWork(*frame);
+                        addOutWork(frame->mLargeWork);
+                        frame->reset();
+                        frame->mComponentFrameIds.clear();
+                        removeEntry = false;
                     }
                     if (removeEntry) {
                         LOG(DEBUG) << "Removing entry: " << thisFrameIndex
@@ -401,9 +497,10 @@
                     // This is to take care of the last bytes and to decide to send with
                     // FLAG_INCOMPLETE or not.
                     if ((frame->mWview
-                            && (frame->mWview->offset() > frame->mLargeFrameTuning.thresholdSize))
+                            && (frame->mWview->offset() >= frame->mLargeFrameTuning.thresholdSize))
                             || frame->mComponentFrameIds.empty()) {
                         if (frame->mLargeWork) {
+                            frame->mLargeWork->result = C2_OK;
                             finalizeWork(*frame);
                             addOutWork(frame->mLargeWork);
                             frame->reset();
@@ -462,12 +559,15 @@
         c2_status_t ret = C2_OK;
         if (frame.mLargeWork == nullptr) {
             frame.mLargeWork.reset(new C2Work);
+            frame.mLargeWork->result = C2_OK;
+            frame.mLargeWork->input.flags = (C2FrameData::flags_t)0;
             frame.mLargeWork->input.ordinal = frame.inOrdinal;
             frame.mLargeWork->input.ordinal.frameIndex = frame.inOrdinal.frameIndex;
         }
         if (allocateWorket) {
             if (frame.mLargeWork->worklets.size() == 0) {
                 frame.mLargeWork->worklets.emplace_back(new C2Worklet);
+                frame.mLargeWork->worklets.back()->output.flags = (C2FrameData::flags_t)0;
             }
         }
         if (allocateBuffer) {
@@ -515,6 +615,9 @@
         if (c2ret != C2_OK) {
             return c2ret;
         }
+        uint32_t flags = work->input.flags;
+        flags |= frame.mLargeWork->input.flags;
+        frame.mLargeWork->input.flags = (C2FrameData::flags_t)flags;
         C2FrameData& outputFramedata = frame.mLargeWork->worklets.front()->output;
         if (!(*worklet)->output.configUpdate.empty()) {
             for (auto& configUpdate : (*worklet)->output.configUpdate) {
@@ -528,9 +631,6 @@
 
         LOG(DEBUG) << "maxOutSize " << frame.mLargeFrameTuning.maxSize
                 << " threshold " << frame.mLargeFrameTuning.thresholdSize;
-        if ((*worklet)->output.buffers.size() > 0) {
-            allocateWork(frame, true, true);
-        }
         LOG(DEBUG) << "This worklet has " << (*worklet)->output.buffers.size() << " buffers"
                 << " ts: " << (*worklet)->output.ordinal.timestamp.peekull();
         int64_t workletTimestamp = (*worklet)->output.ordinal.timestamp.peekull();
@@ -552,43 +652,42 @@
                     inputSize -= (inputSize % frameSize);
                 }
                 while (inputOffset < inputSize) {
-                    if (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize) {
+                    if ((frame.mWview != nullptr)
+                            && (frame.mWview->offset() >= frame.mLargeFrameTuning.thresholdSize)) {
                         frame.mLargeWork->result = C2_OK;
                         finalizeWork(frame, flagsForCopy);
                         addWork(frame.mLargeWork);
                         frame.reset();
-                        allocateWork(frame, true, true);
                     }
                     if (mInterface->kind() == C2Component::KIND_ENCODER) {
                         if (inputSize > frame.mLargeFrameTuning.maxSize) {
-                            LOG(ERROR) << "Enc: Output buffer too small for AU, configured with "
-                                    << frame.mLargeFrameTuning.maxSize
-                                    << " block size: " << blocks.front().size()
-                                    << "alloc size " << frame.mWview->size();
-                            if (frame.mLargeWork
-                                    && frame.mWview && frame.mWview->offset() > 0) {
+                            LOG(WARNING) << "WARNING Encoder:"
+                                    << " Output buffer too small for configuration"
+                                    << " configured max size " << frame.mLargeFrameTuning.maxSize
+                                    << " access unit size " << inputSize;
+                            if (frame.mLargeWork && (frame.mWview && frame.mWview->offset() > 0)) {
+                                frame.mLargeWork->result = C2_OK;
                                 finalizeWork(frame, flagsForCopy);
                                 addWork(frame.mLargeWork);
                                 frame.reset();
-                                allocateWork(frame, true, false);
                             }
-                            frame.mLargeWork->result = C2_NO_MEMORY;
-                            finalizeWork(frame, 0, true);
-                            addWork(frame.mLargeWork);
-                            frame.reset();
-                            return C2_NO_MEMORY;
-                        } else if (inputSize > frame.mWview->size()) {
+                            frame.mLargeFrameTuning.maxSize = inputSize;
+                        } else if ((frame.mWview != nullptr)
+                                && (inputSize > frame.mWview->size())) {
                             LOG(DEBUG) << "Enc: Large frame hitting bufer limit, current size "
                                 << frame.mWview->offset();
-                            if (frame.mLargeWork
-                                    && frame.mWview && frame.mWview->offset() > 0) {
+                            if (frame.mWview->offset() > 0) {
+                                frame.mLargeWork->result = C2_OK;
                                 finalizeWork(frame, flagsForCopy);
                                 addWork(frame.mLargeWork);
                                 frame.reset();
-                                allocateWork(frame, true, true);
                             }
                         }
                     }
+                    allocateWork(frame, true, true);
+                    uint32_t flags = work->input.flags;
+                    flags |= frame.mLargeWork->input.flags;
+                    frame.mLargeWork->input.flags = (C2FrameData::flags_t)flags;
                     C2ReadView rView = blocks.front().map().get();
                     if (rView.error()) {
                         LOG(ERROR) << "Buffer read view error";
@@ -655,7 +754,8 @@
     }
     LOG(DEBUG) << "Finalizing work with input Idx "
             << frame.mLargeWork->input.ordinal.frameIndex.peekull()
-            << " timestamp " << timeStampUs;
+            << " timestamp " << timeStampUs
+            << " inFlags " << inFlags;
     uint32_t finalFlags = 0;
     if ((!forceComplete)
             && (frame.mLargeWork->result == C2_OK)
@@ -683,26 +783,39 @@
             frame.mWview->setOffset(0);
             std::shared_ptr<C2Buffer> c2Buffer = C2Buffer::CreateLinearBuffer(
                     frame.mBlock->share(0, size, ::C2Fence()));
-            if (frame.mAccessUnitInfos.size() > 0) {
-                if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
-                    frame.mAccessUnitInfos.back().flags |=
-                            C2FrameData::FLAG_END_OF_STREAM;
-                }
-                std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
-                        C2AccessUnitInfos::output::AllocShared(
-                        frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
-                frame.mInfos.push_back(largeFrame);
-                frame.mAccessUnitInfos.clear();
-            }
-            for (auto &info : frame.mInfos) {
-                c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
-            }
             frame.mLargeWork->worklets.front()->output.buffers.push_back(std::move(c2Buffer));
-            frame.mInfos.clear();
-            frame.mBlock.reset();
-            frame.mWview.reset();
+        }
+        if (frame.mLargeWork->worklets.front()->output.buffers.size() > 0) {
+            std::shared_ptr<C2Buffer>& c2Buffer =
+                frame.mLargeWork->worklets.front()->output.buffers.front();
+            if (c2Buffer != nullptr) {
+                if (frame.mAccessUnitInfos.size() > 0) {
+                    if (finalFlags & C2FrameData::FLAG_END_OF_STREAM) {
+                        frame.mAccessUnitInfos.back().flags |= C2FrameData::FLAG_END_OF_STREAM;
+                    }
+                    std::shared_ptr<C2AccessUnitInfos::output> largeFrame =
+                            C2AccessUnitInfos::output::AllocShared(
+                                    frame.mAccessUnitInfos.size(), 0u, frame.mAccessUnitInfos);
+                    frame.mInfos.push_back(largeFrame);
+                    frame.mAccessUnitInfos.clear();
+                }
+                for (auto &info : frame.mInfos) {
+                    c2Buffer->setInfo(std::const_pointer_cast<C2Info>(info));
+                }
+            }
+        }
+        if (frame.mConfigUpdate.size() > 0) {
+            outFrameData.configUpdate.insert(
+                    outFrameData.configUpdate.end(),
+                    make_move_iterator(frame.mConfigUpdate.begin()),
+                    make_move_iterator(frame.mConfigUpdate.end()));
         }
     }
+    frame.mConfigUpdate.clear();
+    frame.mInfos.clear();
+    frame.mBlock.reset();
+    frame.mWview.reset();
+
     LOG(DEBUG) << "Multi access-unitflag setting as " << finalFlags;
     return C2_OK;
 }
@@ -735,8 +848,9 @@
     mBlock.reset();
     mWview.reset();
     mInfos.clear();
+    mConfigUpdate.clear();
     mAccessUnitInfos.clear();
     mLargeWork.reset();
 }
 
-}  // namespace android
\ No newline at end of file
+}  // namespace android
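tryReconfigure() above clamps the requested C2LargeFrame sizes to the supported range and skips the config call when nothing changes. A small illustrative sketch of that clamp-and-compare step (the struct and constant are simplified stand-ins for the helper's internals):

    #include <algorithm>
    #include <cstdint>

    // Mirrors the MAX_SUPPORTED_SIZE constant defined in the helper.
    static constexpr uint32_t kMaxSupportedSize = 10 * 512000 * 8 * 2u;

    struct LargeFrameCfg {
        uint32_t maxSize;
        uint32_t thresholdSize;
    };

    // Clamp the request, then report whether it differs from the current
    // config; false means there is nothing to apply.
    bool clampAndCheck(LargeFrameCfg& requested, const LargeFrameCfg& current) {
        requested.maxSize = std::min(requested.maxSize, kMaxSupportedSize);
        requested.thresholdSize = std::min(requested.thresholdSize, kMaxSupportedSize);
        return requested.maxSize != current.maxSize
                || requested.thresholdSize != current.thresholdSize;
    }
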
diff --git a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
index bb4464c..070a1f5 100644
--- a/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
+++ b/media/codec2/hal/common/include/codec2/common/MultiAccessUnitHelper.h
@@ -46,6 +46,7 @@
 protected:
     bool getDecoderSampleRateAndChannelCount(
             uint32_t * const sampleRate_, uint32_t * const channelCount_) const;
+    bool getMaxInputSize(C2StreamMaxBufferSizeInfo::input* const maxInputSize) const;
     const std::shared_ptr<C2ComponentInterface> mC2ComponentIntf;
     std::shared_ptr<C2LargeFrame::output> mLargeFrameParams;
     C2ComponentKindSetting mKind;
@@ -140,6 +141,11 @@
         std::vector<std::shared_ptr<const C2Info>> mInfos;
 
         /*
+         * Vector for holding config updates from the wrapper
+         */
+        std::vector<std::unique_ptr<C2Param>> mConfigUpdate;
+
+        /*
          * C2AccessUnitInfos for the current buffer
          */
         std::vector<C2AccessUnitInfosStruct> mAccessUnitInfos;
@@ -170,6 +176,11 @@
     };
 
     /*
+     * Reconfigure helper
+     */
+    bool tryReconfigure(const std::unique_ptr<C2Param> &p);
+
+    /*
      * Creates a linear block to be used with work
      */
     c2_status_t createLinearBlock(MultiAccessUnitInfo &frame);
@@ -195,6 +206,14 @@
             uint32_t size,
             int64_t timestamp);
 
+    // Flag that controls whether this helper may be dynamically toggled on/off.
+    // Once the helper is enabled and buffers are in transit, it is no longer
+    // possible to turn the module off by setting the max output size to 0,
+    // because the skip-cut buffer expects the metadata to always be present
+    // along with a valid buffer. This flag tracks that state of the module.
+    bool mMultiAccessOnOffAllowed;
+
     bool mInit;
 
     // Interface of this module
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index 0259d90..62f0e25 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -521,7 +521,18 @@
 
 Return<Status> Component::stop() {
     InputBufferManager::unregisterFrameData(mListener);
-    return static_cast<Status>(mComponent->stop());
+    Status status = static_cast<Status>(mComponent->stop());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->clearDeferredBlocks();
+            }
+        }
+    }
+    return status;
 }
 
 Return<Status> Component::reset() {
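
The updated Component::stop() walks the registered block pools and clears deferred blocks on every BufferQueue-backed pool before returning the stop status. A simplified sketch of that iterate/downcast/clear pattern, with stand-in pool types rather than the actual Codec2 classes:

    #include <cstdint>
    #include <map>
    #include <memory>

    // Simplified stand-ins for the block pool hierarchy.
    struct BlockPool {
        virtual ~BlockPool() = default;
        virtual int getAllocatorId() const = 0;
    };
    struct BufferQueueBlockPool : BlockPool {
        static constexpr int kAllocatorId = 2;  // illustrative id
        int getAllocatorId() const override { return kAllocatorId; }
        void clearDeferredBlocks() { /* drop blocks held for deferred output */ }
    };

    void clearDeferredOnStop(std::map<uint64_t, std::shared_ptr<BlockPool>>& pools) {
        for (auto& [id, pool] : pools) {
            if (pool->getAllocatorId() == BufferQueueBlockPool::kAllocatorId) {
                std::static_pointer_cast<BufferQueueBlockPool>(pool)->clearDeferredBlocks();
            }
        }
    }
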
diff --git a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
index 988ab6f..108ba06 100644
--- a/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/ComponentStore.cpp
@@ -139,7 +139,23 @@
     SetPreferredCodec2ComponentStore(store);
 
     // Retrieve struct descriptors
-    mParamReflector = mStore->getParamReflector();
+    mParamReflectors.push_back(mStore->getParamReflector());
+#ifndef __ANDROID_APEX__
+    std::shared_ptr<C2ParamReflector> paramReflector =
+        GetFilterWrapper()->getParamReflector();
+    if (paramReflector != nullptr) {
+        ALOGD("[%s] added param reflector from filter wrapper", mStore->getName().c_str());
+        mParamReflectors.push_back(paramReflector);
+    }
+#endif
+
+    // MultiAccessUnit reflector helper is allocated once per store.
+    // All components in this store can reuse this reflector helper.
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        std::shared_ptr<C2ReflectorHelper> helper = std::make_shared<C2ReflectorHelper>();
+        mParamReflectors.push_back(helper);
+        mMultiAccessUnitReflector = helper;
+    }
 
     // Retrieve supported parameters from store
     using namespace std::placeholders;
@@ -168,8 +184,7 @@
         std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
         auto it = mStructDescriptors.find(coreIndex);
         if (it == mStructDescriptors.end()) {
-            std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+            std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
             if (!structDesc) {
                 // All supported params must be described
                 res = C2_BAD_INDEX;
@@ -200,12 +215,20 @@
     if (c2interface == nullptr) {
         return nullptr;
     }
+    // Framework support for the large audio frame feature depends on:
+    // 1. All relevant feature flags being enabled on the platform.
+    // 2. The implementation being able to use the same input buffer for
+    //    different C2Work (C2Config::api_feature_t::API_SAME_INPUT_BUFFER).
+    // 3. The implementation not inherently supporting the C2LargeFrame::output::PARAM_TYPE param.
     if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
         c2_status_t err = C2_OK;
         C2ComponentDomainSetting domain;
         std::vector<std::unique_ptr<C2Param>> heapParams;
-        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
-        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+        C2ApiFeaturesSetting features = (C2Config::api_feature_t)0;
+        err = c2interface->query_vb({&domain, &features}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK
+                && (domain.value == C2Component::DOMAIN_AUDIO)
+                && ((features.value & C2Config::api_feature_t::API_SAME_INPUT_BUFFER) != 0)) {
             std::vector<std::shared_ptr<C2ParamDescriptor>> params;
             bool isComponentSupportsLargeAudioFrame = false;
             c2interface->querySupportedParams_nb(&params);
@@ -217,7 +240,8 @@
             }
             if (!isComponentSupportsLargeAudioFrame) {
                 multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
-                        c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+                        c2interface,
+                        mMultiAccessUnitReflector);
             }
         }
     }
@@ -339,8 +363,7 @@
         if (item == mStructDescriptors.end()) {
             // not in the cache, and not known to be unsupported, query local reflector
             if (!mUnsupportedStructDescriptors.count(coreIndex)) {
-                std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+                std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
                 if (!structDesc) {
                     mUnsupportedStructDescriptors.emplace(coreIndex);
                 } else {
@@ -386,6 +409,16 @@
     return mConfigurable;
 }
 
+std::shared_ptr<C2StructDescriptor> ComponentStore::describe(const C2Param::CoreIndex &index) {
+    for (const std::shared_ptr<C2ParamReflector> &reflector : mParamReflectors) {
+        std::shared_ptr<C2StructDescriptor> desc = reflector->describe(index);
+        if (desc) {
+            return desc;
+        }
+    }
+    return nullptr;
+}
+
 // Called from createComponent() after a successful creation of `component`.
 void ComponentStore::reportComponentBirth(Component* component) {
     ComponentStatus componentStatus;
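
ComponentStore now keeps a list of param reflectors and resolves a struct descriptor by asking each in turn; describe() returns the first non-null answer. A minimal sketch of that first-match lookup with simplified stand-in types (not the actual C2ParamReflector API):

    #include <cstdint>
    #include <memory>
    #include <vector>

    struct StructDescriptor {};
    struct ParamReflector {
        virtual ~ParamReflector() = default;
        virtual std::shared_ptr<StructDescriptor> describe(uint32_t coreIndex) = 0;
    };

    // The first reflector that knows the index wins; nullptr if none does.
    std::shared_ptr<StructDescriptor> describeFromAll(
            const std::vector<std::shared_ptr<ParamReflector>>& reflectors,
            uint32_t coreIndex) {
        for (const auto& reflector : reflectors) {
            if (auto desc = reflector->describe(coreIndex)) {
                return desc;
            }
        }
        return nullptr;
    }
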
diff --git a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
index b5d85da..028238b 100644
--- a/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.0/utils/include/codec2/hidl/1.0/ComponentStore.h
@@ -55,6 +55,13 @@
 using ::android::sp;
 
 struct ComponentStore : public IComponentStore {
+    /**
+     * Constructor for ComponentStore.
+     *
+     * IMPORTANT: SetPreferredCodec2ComponentStore() is called in the constructor.
+     * Be careful about the order of SetPreferredCodec2ComponentStore() and
+     * ComponentStore() in the code.
+     */
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
@@ -117,9 +124,15 @@
     // Does bookkeeping for an interface that has been loaded.
     void onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf);
 
+    // describe from mParamReflectors
+    std::shared_ptr<C2StructDescriptor> describe(const C2Param::CoreIndex &index);
+
     c2_status_t mInit;
     std::shared_ptr<C2ComponentStore> mStore;
-    std::shared_ptr<C2ParamReflector> mParamReflector;
+    std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
+
+    // Reflector helper for MultiAccessUnitHelper
+    std::shared_ptr<C2ReflectorHelper> mMultiAccessUnitReflector;
 
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
index ab47b7c..36907e1 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/component/VtsHalMediaC2V1_0TargetComponentTest.cpp
@@ -263,9 +263,6 @@
     ALOGV("mComponent->reset() timeConsumed=%" PRId64 " us", timeConsumed);
     ASSERT_EQ(err, C2_OK);
 
-    err = mComponent->start();
-    ASSERT_EQ(err, C2_OK);
-
     // Query supported params by the component
     std::vector<std::shared_ptr<C2ParamDescriptor>> params;
     startTime = getNowUs();
@@ -298,6 +295,9 @@
               timeConsumed);
     }
 
+    err = mComponent->start();
+    ASSERT_EQ(err, C2_OK);
+
     std::list<std::unique_ptr<C2Work>> workList;
     startTime = getNowUs();
     err = mComponent->queue(&workList);
diff --git a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
index f8fd425..90d1874 100644
--- a/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
+++ b/media/codec2/hal/hidl/1.0/vts/functional/video/VtsHalMediaC2V1_0TargetVideoDecTest.cpp
@@ -31,6 +31,7 @@
 #include <C2Debug.h>
 #include <codec2/common/HalSelection.h>
 #include <codec2/hidl/client.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferQueue.h>
 #include <gui/IConsumerListener.h>
 #include <gui/IProducerListener.h>
@@ -139,6 +140,20 @@
         mReorderDepth = -1;
         mTimestampDevTest = false;
         mMd5Offset = 0;
+        mIsTunneledCodec = false;
+
+        // For C2 codecs that support tunneling by default, the default value of
+        // C2PortTunneledModeTuning::mode should be something other than NONE.
+        // Otherwise VTS can assume the codec supports regular (non-tunneled) decode.
+        queried.clear();
+        c2err = mComponent->query(
+                {}, {C2PortTunneledModeTuning::output::PARAM_TYPE}, C2_MAY_BLOCK, &queried);
+        if (c2err == C2_OK && !queried.empty() && queried.front() != nullptr) {
+            C2TunneledModeStruct::mode_t tunneledMode =
+                    ((C2PortTunneledModeTuning::output*)queried.front().get())->m.mode;
+            mIsTunneledCodec = (tunneledMode != C2TunneledModeStruct::NONE);
+        }
+
         mMd5Enable = false;
         mRefMd5 = nullptr;
 
@@ -308,6 +323,7 @@
 
     bool mEos;
     bool mDisableTest;
+    bool mIsTunneledCodec;
     bool mMd5Enable;
     bool mTimestampDevTest;
     uint64_t mTimestampUs;
@@ -408,7 +424,6 @@
                       surfaceMode_t surfMode) {
     using namespace android;
     sp<IGraphicBufferProducer> producer = nullptr;
-    sp<IGraphicBufferConsumer> consumer = nullptr;
     sp<GLConsumer> texture = nullptr;
     sp<ANativeWindow> surface = nullptr;
     static std::atomic_uint32_t surfaceGeneration{0};
@@ -427,6 +442,16 @@
     }
 
     if (surfMode == SURFACE) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        texture = new GLConsumer(0 /* tex */, GLConsumer::TEXTURE_EXTERNAL, true /* useFenceSync */,
+                                 false /* isControlledByApp */);
+        sp<Surface> s = texture->getSurface();
+        surface = s;
+        ASSERT_NE(surface, nullptr) << "failed to create Surface object";
+
+        producer = s->getIGraphicBufferProducer();
+#else
+        sp<IGraphicBufferConsumer> consumer = nullptr;
         BufferQueue::createBufferQueue(&producer, &consumer);
         ASSERT_NE(producer, nullptr) << "createBufferQueue returned invalid producer";
         ASSERT_NE(consumer, nullptr) << "createBufferQueue returned invalid consumer";
@@ -437,6 +462,7 @@
 
         surface = new Surface(producer);
         ASSERT_NE(surface, nullptr) << "failed to create Surface object";
+#endif // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         producer->setGenerationNumber(generation);
     }
@@ -612,11 +638,14 @@
 
     bool signalEOS = std::get<3>(GetParam());
     surfaceMode_t surfMode = std::get<4>(GetParam());
-    mTimestampDevTest = true;
+    // Disable timestamp checking as tunneled codecs don't populate
+    // output buffers in C2Work.
+    mTimestampDevTest = !mIsTunneledCodec;
 
     android::Vector<FrameInfo> Info;
 
-    mMd5Enable = true;
+    // Disable MD5 checks as tunneled codecs don't populate output buffers in C2Work.
+    mMd5Enable = !mIsTunneledCodec;
     if (!mChksumFile.compare(sResourceDir)) mMd5Enable = false;
 
     uint32_t format = HAL_PIXEL_FORMAT_YCBCR_420_888;
@@ -712,7 +741,9 @@
     typedef std::unique_lock<std::mutex> ULock;
     ASSERT_EQ(mComponent->start(), C2_OK);
 
-    mTimestampDevTest = true;
+    // Disable timestamp checking as tunneled codecs don't populate
+    // output buffers in C2Work.
+    mTimestampDevTest = !mIsTunneledCodec;
     uint32_t timestampOffset = 0;
     uint32_t offset = 0;
     android::Vector<FrameInfo> Info;
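
The test changes above query C2PortTunneledModeTuning once during setup and then relax the MD5 and timestamp checks for tunneled decoders, since those do not return output buffers in C2Work. A trivial sketch of how the gating flags are derived (the names are illustrative, not the test's actual members):

    // Derive the per-run checks from whether the codec defaults to tunneled output.
    struct DecoderChecks {
        bool md5Enable;
        bool timestampCheck;
    };

    DecoderChecks configureChecks(bool isTunneledCodec) {
        DecoderChecks checks;
        checks.md5Enable = !isTunneledCodec;        // no output bytes to hash
        checks.timestampCheck = !isTunneledCodec;   // no output timestamps to compare
        return checks;
    }
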
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index d34d84e..7f2c4dd 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -527,7 +527,18 @@
 
 Return<Status> Component::stop() {
     InputBufferManager::unregisterFrameData(mListener);
-    return static_cast<Status>(mComponent->stop());
+    Status status = static_cast<Status>(mComponent->stop());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->clearDeferredBlocks();
+            }
+        }
+    }
+    return status;
 }
 
 Return<Status> Component::reset() {
diff --git a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
index 46af809..84f5d26 100644
--- a/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/ComponentStore.cpp
@@ -139,7 +139,23 @@
     SetPreferredCodec2ComponentStore(store);
 
     // Retrieve struct descriptors
-    mParamReflector = mStore->getParamReflector();
+    mParamReflectors.push_back(mStore->getParamReflector());
+#ifndef __ANDROID_APEX__
+    std::shared_ptr<C2ParamReflector> paramReflector =
+        GetFilterWrapper()->getParamReflector();
+    if (paramReflector != nullptr) {
+        ALOGD("[%s] added param reflector from filter wrapper", mStore->getName().c_str());
+        mParamReflectors.push_back(paramReflector);
+    }
+#endif
+
+    // MultiAccessUnit reflector helper is allocated once per store.
+    // All components in this store can reuse this reflector helper.
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        std::shared_ptr<C2ReflectorHelper> helper = std::make_shared<C2ReflectorHelper>();
+        mParamReflectors.push_back(helper);
+        mMultiAccessUnitReflector = helper;
+    }
 
     // Retrieve supported parameters from store
     using namespace std::placeholders;
@@ -168,8 +184,7 @@
         std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
         auto it = mStructDescriptors.find(coreIndex);
         if (it == mStructDescriptors.end()) {
-            std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+            std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
             if (!structDesc) {
                 // All supported params must be described
                 res = C2_BAD_INDEX;
@@ -200,12 +215,20 @@
     if (c2interface == nullptr) {
         return nullptr;
     }
+    // Framework support for the large audio frame feature depends on:
+    // 1. All relevant feature flags being enabled on the platform.
+    // 2. The implementation being able to use the same input buffer for
+    //    different C2Work (C2Config::api_feature_t::API_SAME_INPUT_BUFFER).
+    // 3. The implementation not inherently supporting the C2LargeFrame::output::PARAM_TYPE param.
     if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
         c2_status_t err = C2_OK;
         C2ComponentDomainSetting domain;
         std::vector<std::unique_ptr<C2Param>> heapParams;
-        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
-        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+        C2ApiFeaturesSetting features = (C2Config::api_feature_t)0;
+        err = c2interface->query_vb({&domain, &features}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK
+                && (domain.value == C2Component::DOMAIN_AUDIO)
+                && ((features.value & C2Config::api_feature_t::API_SAME_INPUT_BUFFER) != 0)) {
             std::vector<std::shared_ptr<C2ParamDescriptor>> params;
             bool isComponentSupportsLargeAudioFrame = false;
             c2interface->querySupportedParams_nb(&params);
@@ -215,10 +238,10 @@
                     break;
                 }
             }
-
             if (!isComponentSupportsLargeAudioFrame) {
                 multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
-                        c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+                        c2interface,
+                        mMultiAccessUnitReflector);
             }
         }
     }
@@ -340,8 +363,7 @@
         if (item == mStructDescriptors.end()) {
             // not in the cache, and not known to be unsupported, query local reflector
             if (!mUnsupportedStructDescriptors.count(coreIndex)) {
-                std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+                std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
                 if (!structDesc) {
                     mUnsupportedStructDescriptors.emplace(coreIndex);
                 } else {
@@ -423,6 +445,16 @@
     return Void();
 }
 
+std::shared_ptr<C2StructDescriptor> ComponentStore::describe(const C2Param::CoreIndex &index) {
+    for (const std::shared_ptr<C2ParamReflector> &reflector : mParamReflectors) {
+        std::shared_ptr<C2StructDescriptor> desc = reflector->describe(index);
+        if (desc) {
+            return desc;
+        }
+    }
+    return nullptr;
+}
+
 // Called from createComponent() after a successful creation of `component`.
 void ComponentStore::reportComponentBirth(Component* component) {
     ComponentStatus componentStatus;
diff --git a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
index 85862a9..b023115 100644
--- a/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.1/utils/include/codec2/hidl/1.1/ComponentStore.h
@@ -56,6 +56,13 @@
 using ::android::sp;
 
 struct ComponentStore : public IComponentStore {
+    /**
+     * Constructor for ComponentStore.
+     *
+     * IMPORTANT: SetPreferredCodec2ComponentStore() is called in the constructor.
+     * Be careful about the order of SetPreferredCodec2ComponentStore() and
+     * ComponentStore() in the code.
+     */
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
@@ -125,9 +132,15 @@
     // Does bookkeeping for an interface that has been loaded.
     void onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf);
 
+    // describe from mParamReflectors
+    std::shared_ptr<C2StructDescriptor> describe(const C2Param::CoreIndex &index);
+
     c2_status_t mInit;
     std::shared_ptr<C2ComponentStore> mStore;
-    std::shared_ptr<C2ParamReflector> mParamReflector;
+    std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
+
+    // Reflector helper for MultiAccessUnitHelper
+    std::shared_ptr<C2ReflectorHelper> mMultiAccessUnitReflector;
 
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index f78e827..7b0aa9b 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -523,7 +523,18 @@
 
 Return<Status> Component::stop() {
     InputBufferManager::unregisterFrameData(mListener);
-    return static_cast<Status>(mComponent->stop());
+    Status status = static_cast<Status>(mComponent->stop());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->clearDeferredBlocks();
+            }
+        }
+    }
+    return status;
 }
 
 Return<Status> Component::reset() {
diff --git a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
index f89c835..5585be8 100644
--- a/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/ComponentStore.cpp
@@ -139,7 +139,23 @@
     SetPreferredCodec2ComponentStore(store);
 
     // Retrieve struct descriptors
-    mParamReflector = mStore->getParamReflector();
+    mParamReflectors.push_back(mStore->getParamReflector());
+#ifndef __ANDROID_APEX__
+    std::shared_ptr<C2ParamReflector> paramReflector =
+        GetFilterWrapper()->getParamReflector();
+    if (paramReflector != nullptr) {
+        ALOGD("[%s] added param reflector from filter wrapper", mStore->getName().c_str());
+        mParamReflectors.push_back(paramReflector);
+    }
+#endif
+
+    // MultiAccessUnit reflector helper is allocated once per store.
+    // All components in this store can reuse this reflector helper.
+    if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
+        std::shared_ptr<C2ReflectorHelper> helper = std::make_shared<C2ReflectorHelper>();
+        mParamReflectors.push_back(helper);
+        mMultiAccessUnitReflector = helper;
+    }
 
     // Retrieve supported parameters from store
     using namespace std::placeholders;
@@ -168,8 +184,7 @@
         std::lock_guard<std::mutex> lock(mStructDescriptorsMutex);
         auto it = mStructDescriptors.find(coreIndex);
         if (it == mStructDescriptors.end()) {
-            std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+            std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
             if (!structDesc) {
                 // All supported params must be described
                 res = C2_BAD_INDEX;
@@ -200,12 +215,20 @@
     if (c2interface == nullptr) {
         return nullptr;
     }
+    // Framework support for the large audio frame feature depends on:
+    // 1. All feature flags being enabled on the platform
+    // 2. The implementation being capable of using the same input buffer
+    //    for different C2Work items (C2Config::api_feature_t::API_SAME_INPUT_BUFFER)
+    // 3. The implementation not inherently supporting the C2LargeFrame::output::PARAM_TYPE param.
     if (MultiAccessUnitHelper::isEnabledOnPlatform()) {
         c2_status_t err = C2_OK;
         C2ComponentDomainSetting domain;
         std::vector<std::unique_ptr<C2Param>> heapParams;
-        err = c2interface->query_vb({&domain}, {}, C2_MAY_BLOCK, &heapParams);
-        if (err == C2_OK && (domain.value == C2Component::DOMAIN_AUDIO)) {
+        C2ApiFeaturesSetting features = (C2Config::api_feature_t)0;
+        err = c2interface->query_vb({&domain, &features}, {}, C2_MAY_BLOCK, &heapParams);
+        if (err == C2_OK
+                && (domain.value == C2Component::DOMAIN_AUDIO)
+                && ((features.value & C2Config::api_feature_t::API_SAME_INPUT_BUFFER) != 0)) {
             std::vector<std::shared_ptr<C2ParamDescriptor>> params;
             bool isComponentSupportsLargeAudioFrame = false;
             c2interface->querySupportedParams_nb(&params);
@@ -217,7 +240,8 @@
             }
             if (!isComponentSupportsLargeAudioFrame) {
                 multiAccessUnitIntf = std::make_shared<MultiAccessUnitInterface>(
-                        c2interface, std::static_pointer_cast<C2ReflectorHelper>(mParamReflector));
+                        c2interface,
+                        mMultiAccessUnitReflector);
             }
         }
     }
@@ -338,8 +362,7 @@
         if (item == mStructDescriptors.end()) {
             // not in the cache, and not known to be unsupported, query local reflector
             if (!mUnsupportedStructDescriptors.count(coreIndex)) {
-                std::shared_ptr<C2StructDescriptor> structDesc =
-                    mParamReflector->describe(coreIndex);
+                std::shared_ptr<C2StructDescriptor> structDesc = describe(coreIndex);
                 if (!structDesc) {
                     mUnsupportedStructDescriptors.emplace(coreIndex);
                 } else {
@@ -457,6 +480,16 @@
     return Void();
 }
 
+std::shared_ptr<C2StructDescriptor> ComponentStore::describe(const C2Param::CoreIndex &index) {
+    for (const std::shared_ptr<C2ParamReflector> &reflector : mParamReflectors) {
+        std::shared_ptr<C2StructDescriptor> desc = reflector->describe(index);
+        if (desc) {
+            return desc;
+        }
+    }
+    return nullptr;
+}
+
 // Called from createComponent() after a successful creation of `component`.
 void ComponentStore::reportComponentBirth(Component* component) {
     ComponentStatus componentStatus;
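
Editor's sketch (not part of the patch): the first-match fallback that the new ComponentStore::describe() implements over mParamReflectors, distilled into a self-contained snippet. Descriptor and Reflector below are hypothetical stand-ins for C2StructDescriptor and C2ParamReflector.

    #include <cstdint>
    #include <memory>
    #include <vector>

    struct Descriptor {};                         // stand-in for C2StructDescriptor
    struct Reflector {                            // stand-in for C2ParamReflector
        virtual ~Reflector() = default;
        virtual std::shared_ptr<Descriptor> describe(uint32_t coreIndex) = 0;
    };

    // Query each reflector in registration order (store first, then the filter
    // wrapper's, then the MultiAccessUnit helper); the first non-null answer wins,
    // and a null result lets the caller record the index as unsupported.
    std::shared_ptr<Descriptor> describeFromAll(
            const std::vector<std::shared_ptr<Reflector>> &reflectors, uint32_t coreIndex) {
        for (const auto &reflector : reflectors) {
            if (std::shared_ptr<Descriptor> desc = reflector->describe(coreIndex)) {
                return desc;
            }
        }
        return nullptr;
    }
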
diff --git a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
index c08fce4..a7e043b 100644
--- a/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
+++ b/media/codec2/hal/hidl/1.2/utils/include/codec2/hidl/1.2/ComponentStore.h
@@ -56,6 +56,13 @@
 using ::android::sp;
 
 struct ComponentStore : public IComponentStore {
+    /**
+     * Constructor for ComponentStore.
+     *
+     * IMPORTANT: SetPreferredCodec2ComponentStore() is called in the constructor.
+     * Be careful about the order of SetPreferredCodec2ComponentStore() and
+     * ComponentStore() in the code.
+     */
     ComponentStore(const std::shared_ptr<C2ComponentStore>& store);
     virtual ~ComponentStore();
 
@@ -132,9 +139,15 @@
     // Does bookkeeping for an interface that has been loaded.
     void onInterfaceLoaded(const std::shared_ptr<C2ComponentInterface> &intf);
 
+    // Queries each reflector in mParamReflectors and returns the first matching descriptor.
+    std::shared_ptr<C2StructDescriptor> describe(const C2Param::CoreIndex &index);
+
     c2_status_t mInit;
     std::shared_ptr<C2ComponentStore> mStore;
-    std::shared_ptr<C2ParamReflector> mParamReflector;
+    std::vector<std::shared_ptr<C2ParamReflector>> mParamReflectors;
+
+    // Reflector helper for MultiAccessUnitHelper
+    std::shared_ptr<C2ReflectorHelper> mMultiAccessUnitReflector;
 
     std::map<C2Param::CoreIndex, std::shared_ptr<C2StructDescriptor>> mStructDescriptors;
     std::set<C2Param::CoreIndex> mUnsupportedStructDescriptors;
diff --git a/media/codec2/hal/plugin/FilterWrapper.cpp b/media/codec2/hal/plugin/FilterWrapper.cpp
index 197d6e7..ab6e3eb 100644
--- a/media/codec2/hal/plugin/FilterWrapper.cpp
+++ b/media/codec2/hal/plugin/FilterWrapper.cpp
@@ -49,11 +49,6 @@
             std::weak_ptr<FilterWrapper> filterWrapper)
         : mIntf(intf), mFilterWrapper(filterWrapper) {
         takeFilters(std::move(filters));
-        for (size_t i = 0; i < mFilters.size(); ++i) {
-            mControlParamTypes.insert(
-                    mFilters[i].desc.controlParams.begin(),
-                    mFilters[i].desc.controlParams.end());
-        }
     }
 
     ~WrappedDecoderInterface() override = default;
@@ -91,6 +86,12 @@
 
         // TODO: documentation
         mFilters = std::move(filters);
+        mControlParamTypes.clear();
+        for (size_t i = 0; i < mFilters.size(); ++i) {
+            mControlParamTypes.insert(
+                    mFilters[i].desc.controlParams.begin(),
+                    mFilters[i].desc.controlParams.end());
+        }
         mTypeToIndexForQuery.clear();
         mTypeToIndexForConfig.clear();
         for (size_t i = 0; i < mFilters.size(); ++i) {
@@ -1011,4 +1012,11 @@
     return mPlugin->queryParamsForPreviousComponent(intf, params);
 }
 
+std::shared_ptr<C2ParamReflector> FilterWrapper::getParamReflector() {
+    if (mInit != OK) {
+        return nullptr;
+    }
+    return mStore->getParamReflector();
+}
+
 }  // namespace android
diff --git a/media/codec2/hal/plugin/FilterWrapperStub.cpp b/media/codec2/hal/plugin/FilterWrapperStub.cpp
index 3fd5409..a21f6d0 100644
--- a/media/codec2/hal/plugin/FilterWrapperStub.cpp
+++ b/media/codec2/hal/plugin/FilterWrapperStub.cpp
@@ -57,4 +57,8 @@
     return CreateCodec2BlockPool(allocatorParam, component, pool);
 }
 
+std::shared_ptr<C2ParamReflector> FilterWrapper::getParamReflector() {
+    return nullptr;
+}
+
 }  // namespace android
diff --git a/media/codec2/hal/plugin/internal/FilterWrapper.h b/media/codec2/hal/plugin/internal/FilterWrapper.h
index dcffb5c..c27901e 100644
--- a/media/codec2/hal/plugin/internal/FilterWrapper.h
+++ b/media/codec2/hal/plugin/internal/FilterWrapper.h
@@ -104,6 +104,10 @@
             const std::shared_ptr<C2ComponentInterface> &intf,
             std::vector<std::unique_ptr<C2Param>> *params);
 
+    /**
+     * Return the param reflector of the filter plugin store.
+     */
+    std::shared_ptr<C2ParamReflector> getParamReflector();
 private:
     status_t mInit;
     std::unique_ptr<Plugin> mPlugin;
diff --git a/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
index b5383ad..34872f0 100644
--- a/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
+++ b/media/codec2/hal/plugin/samples/SampleFilterPlugin.cpp
@@ -37,6 +37,19 @@
                 kParamIndexColorAspects | C2Param::CoreIndex::IS_REQUEST_FLAG>
         C2StreamColorAspectsRequestInfo;
 
+// In practice, vendor parameters are defined in a separate header file, but for
+// the purpose of this sample we just define them here.
+
+// Vendor-specific type indices for filters start from this value. 0x7000 is added to
+// avoid conflicts with existing vendor type indices.
+constexpr uint32_t kTypeIndexFilterStart = C2Param::TYPE_INDEX_VENDOR_START + 0x7000;
+// Answer to the Ultimate Question of Life, the Universe, and Everything
+// (Reference to The Hitchhiker's Guide to the Galaxy by Douglas Adams)
+constexpr uint32_t kParamIndexVendorUltimateAnswer = kTypeIndexFilterStart + 0;
+typedef C2StreamParam<C2Info, C2Int32Value, kParamIndexVendorUltimateAnswer>
+        C2StreamVendorUltimateAnswerInfo;
+constexpr char C2_PARAMKEY_VENDOR_ULTIMATE_ANSWER[] = "ultimate-answer";
+
 namespace android {
 
 using namespace std::literals::chrono_literals;
@@ -49,10 +62,9 @@
         static const std::string NAME;
         static const FilterPlugin_V1::Descriptor DESCRIPTOR;
 
-        explicit Interface(c2_node_id_t id)
+        Interface(c2_node_id_t id, const std::shared_ptr<C2ReflectorHelper> &reflector)
             : mId(id),
-              mReflector(std::make_shared<C2ReflectorHelper>()),
-              mHelper(mReflector) {
+              mHelper(reflector) {
         }
         ~Interface() override = default;
         C2String getName() const override { return NAME; }
@@ -126,7 +138,6 @@
         }
     private:
         const c2_node_id_t mId;
-        std::shared_ptr<C2ReflectorHelper> mReflector;
         struct Helper : public C2InterfaceHelper {
             explicit Helper(std::shared_ptr<C2ReflectorHelper> reflector)
                 : C2InterfaceHelper(reflector) {
@@ -266,6 +277,15 @@
                         .build());
 
                 addParameter(
+                        DefineParam(mVendorUltimateAnswerInfo, C2_PARAMKEY_VENDOR_ULTIMATE_ANSWER)
+                        .withDefault(new C2StreamVendorUltimateAnswerInfo::input(0u))
+                        .withFields({
+                            C2F(mVendorUltimateAnswerInfo, value).any(),
+                        })
+                        .withSetter(VendorUltimateAnswerSetter)
+                        .build());
+
+                addParameter(
                         DefineParam(mOutputColorAspectInfo, C2_PARAMKEY_COLOR_ASPECTS)
                         .withDefault(new C2StreamColorAspectsInfo::output(0u))
                         .withFields({
@@ -336,6 +356,15 @@
                 return C2R::Ok();
             }
 
+            static C2R VendorUltimateAnswerSetter(
+                    bool mayBlock,
+                    C2P<C2StreamVendorUltimateAnswerInfo::input> &me) {
+                (void)mayBlock;
+                ALOGI("Answer to the Ultimate Question of Life, the Universe, and Everything "
+                      "set to %d", me.v.value);
+                return C2R::Ok();
+            }
+
             std::shared_ptr<C2ApiFeaturesSetting> mApiFeatures;
 
             std::shared_ptr<C2ComponentNameSetting> mName;
@@ -362,11 +391,13 @@
             std::shared_ptr<C2StreamColorAspectsInfo::input> mInputColorAspectInfo;
             std::shared_ptr<C2StreamColorAspectsInfo::output> mOutputColorAspectInfo;
             std::shared_ptr<C2StreamColorAspectsRequestInfo::output> mColorAspectRequestInfo;
+
+            std::shared_ptr<C2StreamVendorUltimateAnswerInfo::input> mVendorUltimateAnswerInfo;
         } mHelper;
     };
 
-    explicit SampleToneMappingFilter(c2_node_id_t id)
-        : mIntf(std::make_shared<Interface>(id)) {
+    SampleToneMappingFilter(c2_node_id_t id, const std::shared_ptr<C2ReflectorHelper> &reflector)
+        : mIntf(std::make_shared<Interface>(id, reflector)) {
     }
     ~SampleToneMappingFilter() override {
         if (mProcessingThread.joinable()) {
@@ -802,7 +833,10 @@
 // static
 const FilterPlugin_V1::Descriptor SampleToneMappingFilter::Interface::DESCRIPTOR = {
     // controlParams
-    { C2StreamColorAspectsRequestInfo::output::PARAM_TYPE },
+    {
+        C2StreamColorAspectsRequestInfo::output::PARAM_TYPE,
+        C2StreamVendorUltimateAnswerInfo::input::PARAM_TYPE,
+    },
     // affectedParams
     {
         C2StreamHdrStaticInfo::output::PARAM_TYPE,
@@ -815,28 +849,38 @@
     SampleC2ComponentStore()
         : mReflector(std::make_shared<C2ReflectorHelper>()),
           mIntf(mReflector),
-          mFactories(CreateFactories()) {
+          mFactories(CreateFactories(mReflector)) {
     }
     ~SampleC2ComponentStore() = default;
 
     C2String getName() const override { return "android.sample.filter-plugin-store"; }
     c2_status_t createComponent(
             C2String name, std::shared_ptr<C2Component>* const component) override {
-        if (mFactories.count(name) == 0) {
+        auto it = std::find_if(
+                mFactories.begin(), mFactories.end(),
+                [&name](const std::unique_ptr<ComponentFactory> &factory) {
+                    return name == factory->getTraits()->name;
+                });
+        if (it == mFactories.end()) {
             return C2_BAD_VALUE;
         }
-        return mFactories.at(name)->createComponent(++mNodeId, component);
+        return (*it)->createComponent(++mNodeId, component);
     }
     c2_status_t createInterface(
             C2String name, std::shared_ptr<C2ComponentInterface>* const interface) override {
-        if (mFactories.count(name) == 0) {
+        auto it = std::find_if(
+                mFactories.begin(), mFactories.end(),
+                [&name](const std::unique_ptr<ComponentFactory> &factory) {
+                    return name == factory->getTraits()->name;
+                });
+        if (it == mFactories.end()) {
             return C2_BAD_VALUE;
         }
-        return mFactories.at(name)->createInterface(++mNodeId, interface);
+        return (*it)->createInterface(++mNodeId, interface);
     }
     std::vector<std::shared_ptr<const C2Component::Traits>> listComponents() override {
         std::vector<std::shared_ptr<const C2Component::Traits>> ret;
-        for (const auto &[name, factory] : mFactories) {
+        for (const auto &factory : mFactories) {
             ret.push_back(factory->getTraits());
         }
         return ret;
@@ -892,36 +936,44 @@
     template <class T>
     struct ComponentFactoryImpl : public ComponentFactory {
     public:
-        ComponentFactoryImpl(const std::shared_ptr<const C2Component::Traits> &traits)
-            : ComponentFactory(traits) {
+        ComponentFactoryImpl(
+                const std::shared_ptr<const C2Component::Traits> &traits,
+                const std::shared_ptr<C2ReflectorHelper> &reflector)
+            : ComponentFactory(traits),
+              mReflector(reflector) {
         }
         ~ComponentFactoryImpl() override = default;
         c2_status_t createComponent(
                 c2_node_id_t id,
                 std::shared_ptr<C2Component>* const component) const override {
-            *component = std::make_shared<T>(id);
+            *component = std::make_shared<T>(id, mReflector);
             return C2_OK;
         }
         c2_status_t createInterface(
                 c2_node_id_t id,
                 std::shared_ptr<C2ComponentInterface>* const interface) const override {
-            *interface = std::make_shared<typename T::Interface>(id);
+            *interface = std::make_shared<typename T::Interface>(id, mReflector);
             return C2_OK;
         }
+    private:
+        std::shared_ptr<C2ReflectorHelper> mReflector;
     };
 
     template <class T>
-    static void AddFactory(std::map<C2String, std::unique_ptr<ComponentFactory>> *factories) {
-        std::shared_ptr<C2ComponentInterface> intf{new typename T::Interface(0)};
+    static void AddFactory(
+            std::vector<std::unique_ptr<ComponentFactory>> *factories,
+            const std::shared_ptr<C2ReflectorHelper> &reflector) {
+        std::shared_ptr<C2ComponentInterface> intf{new typename T::Interface(0, reflector)};
         std::shared_ptr<C2Component::Traits> traits(new (std::nothrow) C2Component::Traits);
         CHECK(C2InterfaceUtils::FillTraitsFromInterface(traits.get(), intf))
                 << "Failed to fill traits from interface";
-        factories->emplace(traits->name, new ComponentFactoryImpl<T>(traits));
+        factories->emplace_back(new ComponentFactoryImpl<T>(traits, reflector));
     }
 
-    static std::map<C2String, std::unique_ptr<ComponentFactory>> CreateFactories() {
-        std::map<C2String, std::unique_ptr<ComponentFactory>> factories;
-        AddFactory<SampleToneMappingFilter>(&factories);
+    static std::vector<std::unique_ptr<ComponentFactory>> CreateFactories(
+            const std::shared_ptr<C2ReflectorHelper> &reflector) {
+        std::vector<std::unique_ptr<ComponentFactory>> factories;
+        AddFactory<SampleToneMappingFilter>(&factories, reflector);
         return factories;
     }
 
@@ -933,7 +985,7 @@
         }
     } mIntf;
 
-    const std::map<C2String, std::unique_ptr<ComponentFactory>> mFactories;
+    const std::vector<std::unique_ptr<ComponentFactory>> mFactories;
 
     std::atomic_int32_t mNodeId{0};
 };
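
Editor's sketch (not part of the patch): how a client of the sample filter might configure the vendor parameter defined above. This is a fragment rather than a standalone program; it assumes an already-obtained C2ComponentInterface named intf and the usual (stream, value) stream-parameter constructor.

    // Hypothetical usage; `intf` is assumed to be the filter's C2ComponentInterface.
    C2StreamVendorUltimateAnswerInfo::input answer(0u /* stream */, 42 /* value */);
    std::vector<std::unique_ptr<C2SettingResult>> failures;
    c2_status_t err = intf->config_vb({&answer}, C2_MAY_BLOCK, &failures);
    if (err != C2_OK) {
        ALOGW("Failed to set %s: %d", C2_PARAMKEY_VENDOR_ULTIMATE_ANSWER, err);
    }
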
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 362373e..3c8c1b7 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -84,6 +84,7 @@
         "libui",
         "libutils",
         "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
     ],
 
     export_shared_lib_headers: [
diff --git a/media/codec2/sfplugin/C2AidlNode.cpp b/media/codec2/sfplugin/C2AidlNode.cpp
index 93c9d8b..4e46ad6 100644
--- a/media/codec2/sfplugin/C2AidlNode.cpp
+++ b/media/codec2/sfplugin/C2AidlNode.cpp
@@ -68,10 +68,15 @@
 }
 
 ::ndk::ScopedAStatus C2AidlNode::submitBuffer(
-        int32_t buffer, const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+        int32_t buffer,
+        const std::optional<::aidl::android::hardware::HardwareBuffer>& hBuffer,
         int32_t flags, int64_t timestamp, const ::ndk::ScopedFileDescriptor& fence) {
     sp<GraphicBuffer> gBuf;
-    AHardwareBuffer *ahwb = hBuffer.get();
+    AHardwareBuffer *ahwb = nullptr;
+    if (hBuffer.has_value()) {
+        ahwb = hBuffer.value().get();
+    }
+
     if (ahwb) {
         gBuf = AHardwareBuffer_to_GraphicBuffer(ahwb);
     }
@@ -105,6 +110,10 @@
     return mImpl->onInputBufferDone(index);
 }
 
+void C2AidlNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
+}
+
 android_dataspace C2AidlNode::getDataspace() {
     return mImpl->getDataspace();
 }
diff --git a/media/codec2/sfplugin/C2AidlNode.h b/media/codec2/sfplugin/C2AidlNode.h
index 365a41d..95290fd 100644
--- a/media/codec2/sfplugin/C2AidlNode.h
+++ b/media/codec2/sfplugin/C2AidlNode.h
@@ -49,7 +49,7 @@
 
     ::ndk::ScopedAStatus submitBuffer(
             int32_t buffer,
-            const ::aidl::android::hardware::HardwareBuffer& hBuffer,
+            const std::optional<::aidl::android::hardware::HardwareBuffer>& hBuffer,
             int32_t flags,
             int64_t timestampUs,
             const ::ndk::ScopedFileDescriptor& fence) override;
@@ -68,13 +68,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/C2NodeImpl.cpp b/media/codec2/sfplugin/C2NodeImpl.cpp
index 6f53e0f..585072d 100644
--- a/media/codec2/sfplugin/C2NodeImpl.cpp
+++ b/media/codec2/sfplugin/C2NodeImpl.cpp
@@ -25,6 +25,7 @@
 #include <C2Debug.h>
 #include <C2PlatformSupport.h>
 
+#include <android_media_codec.h>
 #include <android/fdsan.h>
 #include <media/stagefright/foundation/ColorUtils.h>
 #include <ui/Fence.h>
@@ -373,7 +374,10 @@
     }
     work->worklets.clear();
     work->worklets.emplace_back(new C2Worklet);
-    mBufferIdsInUse.lock()->emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        buffers->mIdsInUse.emplace(work->input.ordinal.frameIndex.peeku(), buffer);
+    }
     mQueueThread->queue(comp, fenceFd, std::move(work), std::move(fd0), std::move(fd1));
 
     return OK;
@@ -405,29 +409,74 @@
 }
 
 void C2NodeImpl::onInputBufferDone(c2_cntr64_t index) {
-    if (mAidlHal) {
-        if (!mAidlBufferSource) {
-            ALOGD("Buffer source not set (index=%llu)", index.peekull());
-            return;
-        }
-    } else {
-        if (!mBufferSource) {
-            ALOGD("Buffer source not set (index=%llu)", index.peekull());
-            return;
-        }
-    }
-
-    int32_t bufferId = 0;
-    {
-        decltype(mBufferIdsInUse)::Locked bufferIds(mBufferIdsInUse);
-        auto it = bufferIds->find(index.peeku());
-        if (it == bufferIds->end()) {
+    if (android::media::codec::provider_->input_surface_throttle()) {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        auto it = buffers->mIdsInUse.find(index.peeku());
+        if (it == buffers->mIdsInUse.end()) {
             ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
             return;
         }
-        bufferId = it->second;
-        (void)bufferIds->erase(it);
+        int32_t bufferId = it->second;
+        (void)buffers->mIdsInUse.erase(it);
+        buffers->mAvailableIds.push_back(bufferId);
+    } else {
+        if (!hasBufferSource()) {
+            return;
+        }
+        int32_t bufferId = 0;
+        {
+            Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+            auto it = buffers->mIdsInUse.find(index.peeku());
+            if (it == buffers->mIdsInUse.end()) {
+                ALOGV("Untracked input index %llu (maybe already removed)", index.peekull());
+                return;
+            }
+            bufferId = it->second;
+            (void)buffers->mIdsInUse.erase(it);
+        }
+        notifyInputBufferEmptied(bufferId);
     }
+}
+
+void C2NodeImpl::onInputBufferEmptied() {
+    if (!android::media::codec::provider_->input_surface_throttle()) {
+        ALOGE("onInputBufferEmptied should not be called "
+              "when input_surface_throttle is false");
+        return;
+    }
+    if (!hasBufferSource()) {
+        return;
+    }
+    int32_t bufferId = 0;
+    {
+        Mutexed<BuffersTracker>::Locked buffers(mBuffersTracker);
+        if (buffers->mAvailableIds.empty()) {
+            ALOGV("The codec is ready to take more input buffers "
+                    "but no input buffers are ready yet.");
+            return;
+        }
+        bufferId = buffers->mAvailableIds.front();
+        buffers->mAvailableIds.pop_front();
+    }
+    notifyInputBufferEmptied(bufferId);
+}
+
+bool C2NodeImpl::hasBufferSource() {
+    if (mAidlHal) {
+        if (!mAidlBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    } else {
+        if (!mBufferSource) {
+            ALOGD("Buffer source not set");
+            return false;
+        }
+    }
+    return true;
+}
+
+void C2NodeImpl::notifyInputBufferEmptied(int32_t bufferId) {
     if (mAidlHal) {
         ::ndk::ScopedFileDescriptor nullFence;
         (void)mAidlBufferSource->onInputBufferEmptied(bufferId, nullFence);
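
Editor's sketch (not part of the patch) of the handoff BuffersTracker enables when input_surface_throttle is on: onInputBufferDone() parks the freed buffer ID instead of notifying the buffer source right away, and onInputBufferEmptied() hands one parked ID back only when the codec signals readiness. Types and names below are simplified stand-ins, not the AOSP Mutexed utility.

    #include <cstdint>
    #include <list>
    #include <map>
    #include <mutex>

    struct Tracker {
        std::mutex lock;
        std::map<uint64_t, uint32_t> idsInUse;   // frame index -> buffer ID
        std::list<uint32_t> availableIds;        // freed, not yet returned to the source
    };

    // Mirrors onInputBufferDone(): park the ID instead of notifying immediately.
    void onDone(Tracker &t, uint64_t frameIndex) {
        std::lock_guard<std::mutex> guard(t.lock);
        auto it = t.idsInUse.find(frameIndex);
        if (it == t.idsInUse.end()) return;      // untracked (maybe already removed)
        t.availableIds.push_back(it->second);
        t.idsInUse.erase(it);
    }

    // Mirrors onInputBufferEmptied(): release one parked ID when the codec is ready.
    bool onEmptied(Tracker &t, uint32_t *bufferId) {
        std::lock_guard<std::mutex> guard(t.lock);
        if (t.availableIds.empty()) return false;  // codec ready, but nothing freed yet
        *bufferId = t.availableIds.front();
        t.availableIds.pop_front();
        return true;                               // caller notifies the buffer source
    }
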
diff --git a/media/codec2/sfplugin/C2NodeImpl.h b/media/codec2/sfplugin/C2NodeImpl.h
index e060fd8..cc826b4 100644
--- a/media/codec2/sfplugin/C2NodeImpl.h
+++ b/media/codec2/sfplugin/C2NodeImpl.h
@@ -73,13 +73,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
@@ -118,12 +124,24 @@
     c2_cntr64_t mPrevInputTimestamp; // input timestamp for previous frame
     c2_cntr64_t mPrevCodecTimestamp; // adjusted (codec) timestamp for previous frame
 
-    Mutexed<std::map<uint64_t, uint32_t>> mBufferIdsInUse;
+    // Tracks the status of buffers
+    struct BuffersTracker {
+        BuffersTracker() = default;
+
+        // Keeps track of buffers that are in use by the component. Maps frame index -> buffer ID.
+        std::map<uint64_t, uint32_t> mIdsInUse;
+        // Keeps track of the buffer IDs that are available after being released from the component.
+        std::list<uint32_t> mAvailableIds;
+    };
+    Mutexed<BuffersTracker> mBuffersTracker;
 
     class QueueThread;
     sp<QueueThread> mQueueThread;
 
     bool mAidlHal;
+
+    bool hasBufferSource();
+    void notifyInputBufferEmptied(int32_t bufferId);
 };
 
 }  // namespace android
diff --git a/media/codec2/sfplugin/C2OMXNode.cpp b/media/codec2/sfplugin/C2OMXNode.cpp
index ce02c88..98e25e2 100644
--- a/media/codec2/sfplugin/C2OMXNode.cpp
+++ b/media/codec2/sfplugin/C2OMXNode.cpp
@@ -291,6 +291,10 @@
     return mImpl->onInputBufferDone(index);
 }
 
+void C2OMXNode::onInputBufferEmptied() {
+    return mImpl->onInputBufferEmptied();
+}
+
 android_dataspace C2OMXNode::getDataspace() {
     return mImpl->getDataspace();
 }
diff --git a/media/codec2/sfplugin/C2OMXNode.h b/media/codec2/sfplugin/C2OMXNode.h
index d077202..5549b88 100644
--- a/media/codec2/sfplugin/C2OMXNode.h
+++ b/media/codec2/sfplugin/C2OMXNode.h
@@ -86,13 +86,19 @@
     void setFrameSize(uint32_t width, uint32_t height);
 
     /**
-     * Clean up work item reference.
+     * Notify that the input buffer reference is no longer needed by the component.
+     * Clean up if necessary.
      *
      * \param index input work index
      */
     void onInputBufferDone(c2_cntr64_t index);
 
     /**
+     * Notify that an input buffer has been emptied.
+     */
+    void onInputBufferEmptied();
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     android_dataspace getDataspace();
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 463b63f..632eaed 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -96,11 +96,14 @@
 
 public:
     static sp<CCodecWatchdog> getInstance() {
-        static sp<CCodecWatchdog> instance(new CCodecWatchdog);
-        static std::once_flag flag;
-        // Call Init() only once.
-        std::call_once(flag, Init, instance);
-        return instance;
+        static sp<CCodecWatchdog> sInstance = [] {
+            sp<CCodecWatchdog> instance = new CCodecWatchdog;
+            // the instance is intentionally never destructed
+            instance->incStrong((void *)CCodecWatchdog::getInstance);
+            instance->init();
+            return instance;
+        }();
+        return sInstance;
     }
 
     ~CCodecWatchdog() = default;
@@ -146,11 +149,11 @@
 private:
     CCodecWatchdog() : mLooper(new ALooper) {}
 
-    static void Init(const sp<CCodecWatchdog> &thiz) {
-        ALOGV("Init");
-        thiz->mLooper->setName("CCodecWatchdog");
-        thiz->mLooper->registerHandler(thiz);
-        thiz->mLooper->start();
+    void init() {
+        ALOGV("init");
+        mLooper->setName("CCodecWatchdog");
+        mLooper->registerHandler(this);
+        mLooper->start();
     }
 
     sp<ALooper> mLooper;
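
Editor's sketch (not part of the patch) of the initialization pattern the watchdog now uses: a function-local static initialized by an immediately-invoked lambda runs its setup exactly once under the C++11 thread-safe-static guarantee, replacing the separate std::once_flag, while the extra strong reference (incStrong in the real code, an intentional leak below) keeps the instance from ever being destructed.

    struct Watchdog {
        void init() { /* set looper name, register handler, start thread */ }
    };

    Watchdog *getWatchdog() {
        static Watchdog *const sInstance = [] {
            Watchdog *instance = new Watchdog;   // intentionally never deleted
            instance->init();                    // runs exactly once, thread-safely
            return instance;
        }();
        return sInstance;
    }
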
@@ -222,19 +225,20 @@
     ~HGraphicBufferSourceWrapper() override = default;
 
     status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
-        mNode = new C2OMXNode(comp);
-        mOmxNode = new hardware::media::omx::V1_0::utils::TWOmxNode(mNode);
-        mNode->setFrameSize(mWidth, mHeight);
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        *node = new C2OMXNode(comp);
+        mOmxNode = new hardware::media::omx::V1_0::utils::TWOmxNode(*node);
+        (*node)->setFrameSize(mWidth, mHeight);
         // Usage is queried during configure(), so setting it beforehand.
         // 64 bit set parameter is existing only in C2OMXNode.
         OMX_U64 usage64 = mConfig.mUsage;
-        status_t res = mNode->setParameter(
+        status_t res = (*node)->setParameter(
                 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits64,
                 &usage64, sizeof(usage64));
 
         if (res != OK) {
             OMX_U32 usage = mConfig.mUsage & 0xFFFFFFFF;
-            (void)mNode->setParameter(
+            (void)(*node)->setParameter(
                     (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                     &usage, sizeof(usage));
         }
@@ -244,17 +248,18 @@
     }
 
     void disconnect() override {
-        if (mNode == nullptr) {
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
             return;
         }
-        sp<IOMXBufferSource> source = mNode->getSource();
+        sp<IOMXBufferSource> source = (*node)->getSource();
         if (source == nullptr) {
             ALOGD("GBSWrapper::disconnect: node is not configured with OMXBufferSource.");
             return;
         }
         source->onOmxIdle();
         source->onOmxLoaded();
-        mNode.clear();
+        node->clear();
         mOmxNode.clear();
     }
 
@@ -268,7 +273,11 @@
     }
 
     status_t start() override {
-        sp<IOMXBufferSource> source = mNode->getSource();
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return NO_INIT;
+        }
+        sp<IOMXBufferSource> source = (*node)->getSource();
         if (source == nullptr) {
             return NO_INIT;
         }
@@ -278,7 +287,7 @@
 
         OMX_PARAM_PORTDEFINITIONTYPE param;
         param.nPortIndex = kPortIndexInput;
-        status_t err = mNode->getParameter(OMX_IndexParamPortDefinition,
+        status_t err = (*node)->getParameter(OMX_IndexParamPortDefinition,
                                            &param, sizeof(param));
         if (err == OK) {
             numSlots = param.nBufferCountActual;
@@ -297,6 +306,7 @@
     }
 
     status_t configure(Config &config) {
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
         std::stringstream status;
         status_t err = OK;
 
@@ -317,7 +327,7 @@
 
         // pts gap
         if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
-            if (mNode != nullptr) {
+            if ((*node) != nullptr) {
                 OMX_PARAM_U32TYPE ptrGapParam = {};
                 ptrGapParam.nSize = sizeof(OMX_PARAM_U32TYPE);
                 float gap = (config.mMinAdjustedFps > 0)
@@ -326,7 +336,7 @@
                 // float -> uint32_t is undefined if the value is negative.
                 // First convert to int32_t to ensure the expected behavior.
                 ptrGapParam.nU32 = int32_t(gap);
-                (void)mNode->setParameter(
+                (void)(*node)->setParameter(
                         (OMX_INDEXTYPE)OMX_IndexParamMaxFrameDurationForBitrateControl,
                         &ptrGapParam, sizeof(ptrGapParam));
             }
@@ -426,8 +436,8 @@
 
         // priority
         if (mConfig.mPriority != config.mPriority) {
-            if (config.mPriority != INT_MAX) {
-                mNode->setPriority(config.mPriority);
+            if (config.mPriority != INT_MAX && (*node) != nullptr) {
+                (*node)->setPriority(config.mPriority);
             }
             mConfig.mPriority = config.mPriority;
         }
@@ -441,20 +451,40 @@
     }
 
     void onInputBufferDone(c2_cntr64_t index) override {
-        mNode->onInputBufferDone(index);
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return;
+        }
+        (*node)->onInputBufferDone(index);
+    }
+
+    void onInputBufferEmptied() override {
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return;
+        }
+        (*node)->onInputBufferEmptied();
     }
 
     android_dataspace getDataspace() override {
-        return mNode->getDataspace();
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return HAL_DATASPACE_UNKNOWN;
+        }
+        return (*node)->getDataspace();
     }
 
     uint32_t getPixelFormat() override {
-        return mNode->getPixelFormat();
+        Mutexed<sp<C2OMXNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return PIXEL_FORMAT_UNKNOWN;
+        }
+        return (*node)->getPixelFormat();
     }
 
 private:
     sp<HGraphicBufferSource> mSource;
-    sp<C2OMXNode> mNode;
+    Mutexed<sp<C2OMXNode>> mNode;
     sp<hardware::media::omx::V1_0::IOmxNode> mOmxNode;
     uint32_t mWidth;
     uint32_t mHeight;
@@ -475,33 +505,44 @@
     ~AGraphicBufferSourceWrapper() override = default;
 
     status_t connect(const std::shared_ptr<Codec2Client::Component> &comp) override {
-        mNode = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
-        mNode->setFrameSize(mWidth, mHeight);
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        *node = ::ndk::SharedRefBase::make<C2AidlNode>(comp);
+        (*node)->setFrameSize(mWidth, mHeight);
         // Usage is queried during configure(), so setting it beforehand.
         uint64_t usage = mConfig.mUsage;
-        (void)mNode->setConsumerUsage((int64_t)usage);
+        (void)(*node)->setConsumerUsage((int64_t)usage);
 
+        // AIDL does not define legacy dataspace.
+        android_dataspace_t dataspace = mDataSpace;
+        if (android::media::codec::provider_->dataspace_v0_partial()) {
+            ColorUtils::convertDataSpaceToV0(dataspace);
+        }
         return fromAidlStatus(mSource->configure(
-                mNode, static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
-                        mDataSpace)));
+                (*node), static_cast<::aidl::android::hardware::graphics::common::Dataspace>(
+                        dataspace)));
     }
 
     void disconnect() override {
-        if (mNode == nullptr) {
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
             return;
         }
-        std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+        std::shared_ptr<IAidlBufferSource> source = (*node)->getSource();
         if (source == nullptr) {
             ALOGD("GBSWrapper::disconnect: node is not configured with OMXBufferSource.");
             return;
         }
         (void)source->onStop();
         (void)source->onRelease();
-        mNode.reset();
+        node->reset();
     }
 
     status_t start() override {
-        std::shared_ptr<IAidlBufferSource> source = mNode->getSource();
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return NO_INIT;
+        }
+        std::shared_ptr<IAidlBufferSource> source = (*node)->getSource();
         if (source == nullptr) {
             return NO_INIT;
         }
@@ -509,7 +550,7 @@
         size_t numSlots = 16;
 
         IAidlNode::InputBufferParams param;
-        status_t err = fromAidlStatus(mNode->getInputBufferParams(&param));
+        status_t err = fromAidlStatus((*node)->getInputBufferParams(&param));
         if (err == OK) {
             numSlots = param.bufferCountActual;
         }
@@ -527,6 +568,7 @@
     }
 
     status_t configure(Config &config) {
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
         std::stringstream status;
         status_t err = OK;
 
@@ -547,14 +589,14 @@
 
         // pts gap
         if (config.mMinAdjustedFps > 0 || config.mFixedAdjustedFps > 0) {
-            if (mNode != nullptr) {
+            if ((*node) != nullptr) {
                 float gap = (config.mMinAdjustedFps > 0)
                         ? c2_min(INT32_MAX + 0., 1e6 / config.mMinAdjustedFps + 0.5)
                         : c2_max(0. - INT32_MAX, -1e6 / config.mFixedAdjustedFps - 0.5);
                 // float -> uint32_t is undefined if the value is negative.
                 // First convert to int32_t to ensure the expected behavior.
                 int32_t gapUs = int32_t(gap);
-                (void)mNode->setAdjustTimestampGapUs(gapUs);
+                (void)(*node)->setAdjustTimestampGapUs(gapUs);
             }
         }
 
@@ -646,7 +688,7 @@
         // priority
         if (mConfig.mPriority != config.mPriority) {
             if (config.mPriority != INT_MAX) {
-                mNode->setPriority(config.mPriority);
+                (*node)->setPriority(config.mPriority);
             }
             mConfig.mPriority = config.mPriority;
         }
@@ -660,20 +702,40 @@
     }
 
     void onInputBufferDone(c2_cntr64_t index) override {
-        mNode->onInputBufferDone(index);
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return;
+        }
+        (*node)->onInputBufferDone(index);
+    }
+
+    void onInputBufferEmptied() override {
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return;
+        }
+        (*node)->onInputBufferEmptied();
     }
 
     android_dataspace getDataspace() override {
-        return mNode->getDataspace();
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return HAL_DATASPACE_UNKNOWN;
+        }
+        return (*node)->getDataspace();
     }
 
     uint32_t getPixelFormat() override {
-        return mNode->getPixelFormat();
+        Mutexed<std::shared_ptr<C2AidlNode>>::Locked node(mNode);
+        if ((*node) == nullptr) {
+            return PIXEL_FORMAT_UNKNOWN;
+        }
+        return (*node)->getPixelFormat();
     }
 
 private:
     std::shared_ptr<AGraphicBufferSource> mSource;
-    std::shared_ptr<C2AidlNode> mNode;
+    Mutexed<std::shared_ptr<C2AidlNode>> mNode;
     uint32_t mWidth;
     uint32_t mHeight;
     Config mConfig;
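
Editor's sketch (not part of the patch) of the locking pattern applied to mNode in both wrappers above: the node member is wrapped so every accessor holds a lock for the duration of the call and null-checks the node, which guards against use after disconnect(). std::mutex below is a simplified stand-in for the AOSP Mutexed<> utility.

    #include <memory>
    #include <mutex>

    struct Node {
        int getPixelFormat() const { return 0; }
    };

    class Wrapper {
    public:
        void connect()    { std::lock_guard<std::mutex> g(mLock); mNode = std::make_shared<Node>(); }
        void disconnect() { std::lock_guard<std::mutex> g(mLock); mNode.reset(); }
        int getPixelFormat() const {
            std::lock_guard<std::mutex> g(mLock);
            if (!mNode) return -1;               // PIXEL_FORMAT_UNKNOWN in the real code
            return mNode->getPixelFormat();
        }
    private:
        mutable std::mutex mLock;
        std::shared_ptr<Node> mNode;             // only touched while holding mLock
    };
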
@@ -2227,8 +2289,23 @@
     // So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
     // prior to comp->stop().
     // See also b/300350761.
-    mChannel->stopUseOutputSurface(pushBlankBuffer);
-    status_t err = comp->stop();
+    //
+    // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+    // so we revert to the logical sequence of operations when AIDL HALs are selected.
+    // When HIDL HALs are selected, we retain the workaround (the reversed order) as
+    // the default in order to keep the legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
+    status_t err = C2_OK;
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+        err = comp->stop();
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+    } else {
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+        err = comp->stop();
+    }
     if (err != C2_OK) {
         // TODO: convert err into status_t
         mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
@@ -2323,8 +2400,22 @@
     // So we reverse their order for stopUseOutputSurface() to notify C2Fence waiters
     // prior to comp->release().
     // See also b/300350761.
-    mChannel->stopUseOutputSurface(pushBlankBuffer);
-    comp->release();
+    //
+    // The workaround is no longer needed with the fetchGraphicBlock & C2Fence changes,
+    // so we revert to the logical sequence of operations when AIDL HALs are selected.
+    // When HIDL HALs are selected, we retain the workaround (the reversed order) as
+    // the default in order to keep the legacy behavior.
+    bool stopHalBeforeSurface =
+            Codec2Client::IsAidlSelected() ||
+            property_get_bool("debug.codec2.stop_hal_before_surface", false);
+    if (stopHalBeforeSurface && android::media::codec::provider_->stop_hal_before_surface()) {
+        comp->release();
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+    } else {
+        mChannel->stopUseOutputSurface(pushBlankBuffer);
+        comp->release();
+    }
 
     {
         Mutexed<State>::Locked state(mState);
@@ -2568,18 +2659,31 @@
             c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &pool);
 
             if (status == C2_OK) {
+                int width, height;
+                config->mInputFormat->findInt32("width", &width);
+                config->mInputFormat->findInt32("height", &height);
+                // The length of the qp-map corresponds to the number of 16x16 blocks in one frame
+                int expectedMapSize = ((width + 15) / 16) * ((height + 15) / 16);
                 size_t mapSize = qpOffsetMap->size();
-                std::shared_ptr<C2LinearBlock> block;
-                status = pool->fetchLinearBlock(mapSize,
-                        C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
-                if (status == C2_OK && !block->map().get().error()) {
-                    C2WriteView wView = block->map().get();
-                    uint8_t* outData = wView.data();
-                    memcpy(outData, qpOffsetMap->data(), mapSize);
-                    C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
-                            kParamIndexQpOffsetMapBuffer,
-                            block->share(0, mapSize, C2Fence()));
-                    mChannel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
+                if (mapSize >= expectedMapSize) {
+                    std::shared_ptr<C2LinearBlock> block;
+                    status = pool->fetchLinearBlock(
+                            expectedMapSize,
+                            C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE},
+                            &block);
+                    if (status == C2_OK && !block->map().get().error()) {
+                        C2WriteView wView = block->map().get();
+                        uint8_t* outData = wView.data();
+                        memcpy(outData, qpOffsetMap->data(), expectedMapSize);
+                        C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
+                                kParamIndexQpOffsetMapBuffer,
+                                block->share(0, expectedMapSize, C2Fence()));
+                        mChannel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
+                    }
+                } else {
+                    ALOGE("Ignoring param key %s as buffer size %zu is less than expected "
+                          "buffer size %d",
+                          PARAMETER_KEY_QP_OFFSET_MAP, mapSize, expectedMapSize);
                 }
             }
             params->removeEntryByName(PARAMETER_KEY_QP_OFFSET_MAP);
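
Editor's note: the expected qp-offset-map length is one entry per 16x16 block, rounding partial blocks up; for a 1920x1080 frame that is ((1920+15)/16) * ((1080+15)/16) = 120 * 68 = 8160 entries, and shorter client-supplied maps are now rejected instead of being copied as-is. A tiny standalone check of the arithmetic:

    #include <cstdio>

    static int expectedQpMapSize(int width, int height) {
        // One qp offset per 16x16 block, partial blocks rounded up.
        return ((width + 15) / 16) * ((height + 15) / 16);
    }

    int main() {
        std::printf("%d\n", expectedQpMapSize(1920, 1080));  // prints 8160
        return 0;
    }
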
@@ -2596,6 +2700,15 @@
     if (config->mInputSurface == nullptr
             && (property_get_bool("debug.stagefright.ccodec_delayed_params", false)
                     || comp->getName().find("c2.android.") == 0)) {
+        std::vector<std::unique_ptr<C2Param>> localConfigUpdate;
+        for (const std::unique_ptr<C2Param> &param : configUpdate) {
+            if (param && param->coreIndex().coreIndex() == C2StreamSurfaceScalingInfo::CORE_INDEX) {
+                localConfigUpdate.push_back(C2Param::Copy(*param));
+            }
+        }
+        if (!localConfigUpdate.empty()) {
+            (void)config->setParameters(comp, localConfigUpdate, C2_MAY_BLOCK);
+        }
         mChannel->setParameters(configUpdate);
     } else {
         sp<AMessage> outputFormat = config->mOutputFormat;
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 3984b83..3ef2f84 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -228,15 +228,23 @@
 status_t CCodecBufferChannel::setInputSurface(
         const std::shared_ptr<InputSurfaceWrapper> &surface) {
     ALOGV("[%s] setInputSurface", mName);
-    mInputSurface = surface;
-    return mInputSurface->connect(mComponent);
+    if (!surface) {
+        ALOGE("[%s] setInputSurface: surface must not be null", mName);
+        return BAD_VALUE;
+    }
+    Mutexed<InputSurface>::Locked inputSurface(mInputSurface);
+    inputSurface->numProcessingBuffersBalance = 0;
+    inputSurface->surface = surface;
+    mHasInputSurface = true;
+    return inputSurface->surface->connect(mComponent);
 }
 
 status_t CCodecBufferChannel::signalEndOfInputStream() {
-    if (mInputSurface == nullptr) {
+    Mutexed<InputSurface>::Locked inputSurface(mInputSurface);
+    if (inputSurface->surface == nullptr) {
         return INVALID_OPERATION;
     }
-    return mInputSurface->signalEndOfInputStream();
+    return inputSurface->surface->signalEndOfInputStream();
 }
 
 status_t CCodecBufferChannel::queueInputBufferInternal(
@@ -1061,13 +1069,36 @@
     if (mInputMetEos) {
         return;
     }
-    {
+    int64_t numOutputSlots = 0;
+    bool outputFull = [this, &numOutputSlots]() {
         Mutexed<Output>::Locked output(mOutput);
-        if (!output->buffers ||
-                output->buffers->hasPending() ||
-                (!output->bounded && output->buffers->numActiveSlots() >= output->numSlots)) {
-            return;
+        if (!output->buffers) {
+            ALOGV("[%s] feedInputBufferIfAvailableInternal: "
+                  "return because output buffers are null", mName);
+            return true;
         }
+        numOutputSlots = int64_t(output->numSlots);
+        if (output->buffers->hasPending() ||
+                (!output->bounded && output->buffers->numActiveSlots() >= output->numSlots)) {
+            ALOGV("[%s] feedInputBufferIfAvailableInternal: "
+                  "return because there is no room for more output buffers", mName);
+            return true;
+        }
+        return false;
+    }();
+    if (android::media::codec::provider_->input_surface_throttle()) {
+        Mutexed<InputSurface>::Locked inputSurface(mInputSurface);
+        if (inputSurface->surface) {
+            if (inputSurface->numProcessingBuffersBalance <= numOutputSlots) {
+                ++inputSurface->numProcessingBuffersBalance;
+                ALOGV("[%s] feedInputBufferIfAvailableInternal: numProcessingBuffersBalance = %lld",
+                      mName, static_cast<long long>(inputSurface->numProcessingBuffersBalance));
+                inputSurface->surface->onInputBufferEmptied();
+            }
+        }
+    }
+    if (outputFull) {
+        return;
     }
     size_t numActiveSlots = 0;
     while (!mPipelineWatcher.lock()->pipelineFull()) {
@@ -1465,6 +1496,17 @@
     }
 }
 
+void CCodecBufferChannel::onBufferAttachedToOutputSurface(uint32_t generation) {
+    // Note: This is called asynchronously from IProducerListener, which does not
+    // know the internal state of CCodec/CCodecBufferChannel, so hold a shared
+    // reference to prevent mComponent from being destroyed while this method
+    // executes.
+    std::shared_ptr<Codec2Client::Component> comp = mComponent;
+    if (comp) {
+        comp->onBufferAttachedToOutputSurface(generation);
+    }
+}
+
 status_t CCodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
     ALOGV("[%s] discardBuffer: %p", mName, buffer.get());
     bool released = false;
@@ -1685,7 +1727,7 @@
                 && (hasCryptoOrDescrambler() || conforming)) {
             input->buffers.reset(new SlotInputBuffers(mName));
         } else if (graphic) {
-            if (mInputSurface) {
+            if (mHasInputSurface) {
                 input->buffers.reset(new DummyInputBuffers(mName));
             } else if (mMetaMode == MODE_ANW) {
                 input->buffers.reset(new GraphicMetadataInputBuffers(mName));
@@ -1968,7 +2010,7 @@
 
 status_t CCodecBufferChannel::prepareInitialInputBuffers(
         std::map<size_t, sp<MediaCodecBuffer>> *clientInputBuffers, bool retry) {
-    if (mInputSurface) {
+    if (mHasInputSurface) {
         return OK;
     }
 
@@ -2094,11 +2136,13 @@
 
 void CCodecBufferChannel::reset() {
     stop();
-    if (mInputSurface != nullptr) {
-        mInputSurface.reset();
-    }
     mPipelineWatcher.lock()->flush();
     {
+        mHasInputSurface = false;
+        Mutexed<InputSurface>::Locked inputSurface(mInputSurface);
+        inputSurface->surface.reset();
+    }
+    {
         Mutexed<Input>::Locked input(mInput);
         input->buffers.reset(new DummyInputBuffers(""));
         input->extraBuffers.flush();
@@ -2191,9 +2235,6 @@
 
 void CCodecBufferChannel::onInputBufferDone(
         uint64_t frameIndex, size_t arrayIndex) {
-    if (mInputSurface) {
-        return;
-    }
     std::shared_ptr<C2Buffer> buffer =
             mPipelineWatcher.lock()->onInputBufferReleased(frameIndex, arrayIndex);
     bool newInputSlotAvailable = false;
@@ -2248,7 +2289,7 @@
         notifyClient = false;
     }
 
-    if (mInputSurface == nullptr && (work->worklets.size() != 1u
+    if (!mHasInputSurface && (work->worklets.size() != 1u
             || !work->worklets.front()
             || !(work->worklets.front()->output.flags &
                  C2FrameData::FLAG_INCOMPLETE))) {
@@ -2457,7 +2498,7 @@
     c2_cntr64_t timestamp =
         worklet->output.ordinal.timestamp + work->input.ordinal.customOrdinal
                 - work->input.ordinal.timestamp;
-    if (mInputSurface != nullptr) {
+    if (mHasInputSurface) {
         // When using input surface we need to restore the original input timestamp.
         timestamp = work->input.ordinal.customOrdinal;
     }
@@ -2587,8 +2628,6 @@
         switch (action) {
         case OutputBuffers::SKIP:
             return;
-        case OutputBuffers::DISCARD:
-            break;
         case OutputBuffers::NOTIFY_CLIENT:
         {
             // TRICKY: we want popped buffers reported in order, so sending
@@ -2607,8 +2646,8 @@
                                 bufferMetadata->m.values[nMeta];
                         flag = convertFlags(bufferMetadataStruct.flags, false);
                         accessUnitInfos.emplace_back(flag,
-                                static_cast<size_t>(bufferMetadataStruct.size),
-                                static_cast<size_t>(bufferMetadataStruct.timestamp));
+                                bufferMetadataStruct.size,
+                                bufferMetadataStruct.timestamp);
                     }
                     sp<WrapperObject<std::vector<AccessUnitInfo>>> obj{
                         new WrapperObject<std::vector<AccessUnitInfo>>{accessUnitInfos}};
@@ -2616,6 +2655,15 @@
                 }
             }
             mCallback->onOutputBufferAvailable(index, outBuffer);
+            [[fallthrough]];
+        }
+        case OutputBuffers::DISCARD: {
+            if (mHasInputSurface && android::media::codec::provider_->input_surface_throttle()) {
+                Mutexed<InputSurface>::Locked inputSurface(mInputSurface);
+                --inputSurface->numProcessingBuffersBalance;
+                ALOGV("[%s] onWorkDone: numProcessingBuffersBalance = %lld",
+                        mName, static_cast<long long>(inputSurface->numProcessingBuffersBalance));
+            }
             break;
         }
         case OutputBuffers::REALLOCATE:
@@ -2784,7 +2832,15 @@
 }
 
 void CCodecBufferChannel::setInfoBuffer(const std::shared_ptr<C2InfoBuffer> &buffer) {
-    mInfoBuffers.push_back(buffer);
+    if (!mHasInputSurface) {
+        mInfoBuffers.push_back(buffer);
+    } else {
+        std::list<std::unique_ptr<C2Work>> items;
+        std::unique_ptr<C2Work> work(new C2Work);
+        work->input.infoBuffers.emplace_back(*buffer);
+        work->worklets.emplace_back(new C2Worklet);
+        items.push_back(std::move(work));
+    }
 }
 
 status_t toStatusT(c2_status_t c2s, c2_operation_t c2op) {
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index 94a5998..4d296fd 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -102,6 +102,7 @@
             const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) override;
     void pollForRenderedBuffers() override;
     void onBufferReleasedFromOutputSurface(uint32_t generation) override;
+    void onBufferAttachedToOutputSurface(uint32_t generation) override;
     status_t discardBuffer(const sp<MediaCodecBuffer> &buffer) override;
     void getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
     void getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) override;
@@ -390,7 +391,21 @@
     };
     Mutexed<BlockPools> mBlockPools;
 
-    std::shared_ptr<InputSurfaceWrapper> mInputSurface;
+    std::atomic_bool mHasInputSurface;
+    struct InputSurface {
+        std::shared_ptr<InputSurfaceWrapper> surface;
+        // This variable tracks the number of buffers being processed by the
+        // input surface and the codec: it counts the buffers to be filled in
+        // and queued from the input surface against the buffers produced by
+        // the codec.
+        //
+        // Note that this variable can go below 0, because it does not take
+        // into account the number of buffers initially in the buffer queue
+        // at start. This is okay, as we only track how many more buffers we
+        // allow relative to the initial state.
+        int64_t numProcessingBuffersBalance;
+    };
+    Mutexed<InputSurface> mInputSurface;
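
Editor's sketch (not part of the patch) of the throttling rule built around numProcessingBuffersBalance: a new buffer is requested from the input surface only while the balance has not run more than the number of output slots ahead, and the balance drops every time the codec emits an output work item. In the real code the counter lives under Mutexed<InputSurface> and is gated by the input_surface_throttle flag.

    #include <cstdint>

    struct Throttle {
        int64_t balance = 0;                     // may go negative; see comment above

        // Request one more buffer from the input surface only if we are not
        // already too far ahead of the output side.
        bool maybeRequestInput(int64_t numOutputSlots) {
            if (balance <= numOutputSlots) {
                ++balance;
                return true;                     // caller invokes onInputBufferEmptied()
            }
            return false;
        }

        // Called when an output work item is produced (delivered or discarded).
        void onOutputProduced() { --balance; }
    };
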
 
     MetaMode mMetaMode;
 
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index db59227..a943626 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1890,22 +1890,32 @@
         if (mDomain == (IS_VIDEO | IS_ENCODER)) {
             AString qpOffsetRects;
             if (params->findString(PARAMETER_KEY_QP_OFFSET_RECTS, &qpOffsetRects)) {
+                int width, height;
+                mInputFormat->findInt32("width", &width);
+                mInputFormat->findInt32("height", &height);
                 std::vector<C2QpOffsetRectStruct> c2QpOffsetRects;
                 char mutableStrQpOffsetRects[strlen(qpOffsetRects.c_str()) + 1];
                 strcpy(mutableStrQpOffsetRects, qpOffsetRects.c_str());
-                char* box = strtok(mutableStrQpOffsetRects, ";");
+                char* savePtr;
+                char* box = strtok_r(mutableStrQpOffsetRects, ";", &savePtr);
                 while (box != nullptr) {
                     int top, left, bottom, right, offset;
                     if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
                         left = c2_max(0, left);
                         top = c2_max(0, top);
+                        right = c2_min(right, width);
+                        bottom = c2_min(bottom, height);
                         if (right > left && bottom > top) {
                             C2Rect rect(right - left, bottom - top);
                             rect.at(left, top);
                             c2QpOffsetRects.push_back(C2QpOffsetRectStruct(rect, offset));
+                        } else {
+                            ALOGE("Rects configuration %s is not valid.", box);
                         }
+                    } else {
+                        ALOGE("Rects configuration %s doesn't follow the string pattern.", box);
                     }
-                    box = strtok(nullptr, ";");
+                    box = strtok_r(nullptr, ";", &savePtr);
                 }
                 if (c2QpOffsetRects.size() != 0) {
                     const std::unique_ptr<C2StreamQpOffsetRects::output> regions =
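
Editor's note: the switch from strtok() to strtok_r() above matters because strtok() keeps its scan position in hidden global state, so nested or concurrent tokenization corrupts it, while strtok_r() keeps that state in a caller-owned pointer. A small standalone example of the same rect-list parsing loop:

    #include <cstdio>
    #include <cstring>

    int main() {
        char input[] = "0,0-16,16=1;16,0-32,16=-1";   // "top,left-bottom,right=offset;..."
        char *savePtr = nullptr;                      // re-entrant tokenizer state
        for (char *box = strtok_r(input, ";", &savePtr); box != nullptr;
                box = strtok_r(nullptr, ";", &savePtr)) {
            int top, left, bottom, right, offset;
            if (std::sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
                std::printf("rect (%d,%d)-(%d,%d) offset %d\n", left, top, right, bottom, offset);
            }
        }
        return 0;
    }
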
diff --git a/media/codec2/sfplugin/Codec2Buffer.cpp b/media/codec2/sfplugin/Codec2Buffer.cpp
index 2550dcf..7d4e8ab 100644
--- a/media/codec2/sfplugin/Codec2Buffer.cpp
+++ b/media/codec2/sfplugin/Codec2Buffer.cpp
@@ -20,6 +20,8 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include <android_media_codec.h>
+
 #include <aidl/android/hardware/graphics/common/Cta861_3.h>
 #include <aidl/android/hardware/graphics/common/Smpte2086.h>
 #include <android-base/no_destructor.h>
@@ -33,6 +35,7 @@
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/foundation/ColorUtils.h>
 #include <mediadrm/ICrypto.h>
 #include <nativebase/nativebase.h>
 #include <ui/GraphicBufferMapper.h>
@@ -43,6 +46,7 @@
 #include <C2Debug.h>
 
 #include "Codec2Buffer.h"
+#include "Codec2BufferUtils.h"
 
 namespace android {
 
@@ -215,482 +219,6 @@
     mBufferRef.reset();
 }
 
-// GraphicView2MediaImageConverter
-
-namespace {
-
-class GraphicView2MediaImageConverter {
-public:
-    /**
-     * Creates a C2GraphicView <=> MediaImage converter
-     *
-     * \param view C2GraphicView object
-     * \param format buffer format
-     * \param copy whether the converter is used for copy or not
-     */
-    GraphicView2MediaImageConverter(
-            const C2GraphicView &view, const sp<AMessage> &format, bool copy)
-        : mInitCheck(NO_INIT),
-          mView(view),
-          mWidth(view.width()),
-          mHeight(view.height()),
-          mAllocatedDepth(0),
-          mBackBufferSize(0),
-          mMediaImage(new ABuffer(sizeof(MediaImage2))) {
-        ATRACE_CALL();
-        if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
-            mClientColorFormat = COLOR_FormatYUV420Flexible;
-        }
-        if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
-            mComponentColorFormat = COLOR_FormatYUV420Flexible;
-        }
-        if (view.error() != C2_OK) {
-            ALOGD("Converter: view.error() = %d", view.error());
-            mInitCheck = BAD_VALUE;
-            return;
-        }
-        MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
-        const C2PlanarLayout &layout = view.layout();
-        if (layout.numPlanes == 0) {
-            ALOGD("Converter: 0 planes");
-            mInitCheck = BAD_VALUE;
-            return;
-        }
-        memset(mediaImage, 0, sizeof(*mediaImage));
-        mAllocatedDepth = layout.planes[0].allocatedDepth;
-        uint32_t bitDepth = layout.planes[0].bitDepth;
-
-        // align width and height to support subsampling cleanly
-        uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
-        uint32_t vStride = align(view.crop().height, 2);
-
-        bool tryWrapping = !copy;
-
-        switch (layout.type) {
-            case C2PlanarLayout::TYPE_YUV: {
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
-                if (layout.numPlanes != 3) {
-                    ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                std::optional<int> clientBitDepth = {};
-                switch (mClientColorFormat) {
-                    case COLOR_FormatYUVP010:
-                        clientBitDepth = 10;
-                        break;
-                    case COLOR_FormatYUV411PackedPlanar:
-                    case COLOR_FormatYUV411Planar:
-                    case COLOR_FormatYUV420Flexible:
-                    case COLOR_FormatYUV420PackedPlanar:
-                    case COLOR_FormatYUV420PackedSemiPlanar:
-                    case COLOR_FormatYUV420Planar:
-                    case COLOR_FormatYUV420SemiPlanar:
-                    case COLOR_FormatYUV422Flexible:
-                    case COLOR_FormatYUV422PackedPlanar:
-                    case COLOR_FormatYUV422PackedSemiPlanar:
-                    case COLOR_FormatYUV422Planar:
-                    case COLOR_FormatYUV422SemiPlanar:
-                    case COLOR_FormatYUV444Flexible:
-                    case COLOR_FormatYUV444Interleaved:
-                        clientBitDepth = 8;
-                        break;
-                    default:
-                        // no-op; used with optional
-                        break;
-
-                }
-                // conversion fails if client bit-depth and the component bit-depth differs
-                if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
-                    ALOGD("Bit depth of client: %d and component: %d differs",
-                        *clientBitDepth, bitDepth);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
-                C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
-                C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
-                if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
-                        || uPlane.channel != C2PlaneInfo::CHANNEL_CB
-                        || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
-                    ALOGD("Converter: not YUV layout");
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
-                        && uPlane.rowSampling == 2 && uPlane.colSampling == 2
-                        && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
-                if (yuv420888) {
-                    for (uint32_t i = 0; i < 3; ++i) {
-                        const C2PlaneInfo &plane = layout.planes[i];
-                        if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
-                            yuv420888 = false;
-                            break;
-                        }
-                    }
-                    yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
-                }
-                int32_t copyFormat = mClientColorFormat;
-                if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
-                    if (uPlane.colInc == 2 && vPlane.colInc == 2
-                            && yPlane.rowInc == uPlane.rowInc) {
-                        copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
-                    } else if (uPlane.colInc == 1 && vPlane.colInc == 1
-                            && yPlane.rowInc == uPlane.rowInc * 2) {
-                        copyFormat = COLOR_FormatYUV420PackedPlanar;
-                    }
-                }
-                ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
-                        "v:{colInc=%d rowInc=%d}",
-                        mClientColorFormat,
-                        yPlane.colInc, yPlane.rowInc,
-                        uPlane.colInc, uPlane.rowInc,
-                        vPlane.colInc, vPlane.rowInc);
-                switch (copyFormat) {
-                    case COLOR_FormatYUV420Flexible:
-                    case COLOR_FormatYUV420Planar:
-                    case COLOR_FormatYUV420PackedPlanar:
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
-                        mediaImage->mPlane[mediaImage->U].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
-                        mediaImage->mPlane[mediaImage->V].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-
-                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
-                            tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
-                                    && yPlane.rowInc == uPlane.rowInc * 2
-                                    && view.data()[0] < view.data()[1]
-                                    && view.data()[1] < view.data()[2];
-                        }
-                        break;
-
-                    case COLOR_FormatYUV420SemiPlanar:
-                    case COLOR_FormatYUV420PackedSemiPlanar:
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = 1;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
-                        mediaImage->mPlane[mediaImage->U].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
-                        mediaImage->mPlane[mediaImage->V].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-
-                        if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
-                            tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
-                                    && yPlane.rowInc == uPlane.rowInc
-                                    && view.data()[0] < view.data()[1]
-                                    && view.data()[1] < view.data()[2];
-                        }
-                        break;
-
-                    case COLOR_FormatYUVP010:
-                        // stride is in bytes
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = 2;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
-
-                        mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
-                        mediaImage->mPlane[mediaImage->U].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
-
-                        mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
-                        mediaImage->mPlane[mediaImage->V].mColInc = 4;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = stride;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
-                        if (tryWrapping) {
-                            tryWrapping = yPlane.allocatedDepth == 16
-                                    && uPlane.allocatedDepth == 16
-                                    && vPlane.allocatedDepth == 16
-                                    && yPlane.bitDepth == 10
-                                    && uPlane.bitDepth == 10
-                                    && vPlane.bitDepth == 10
-                                    && yPlane.rightShift == 6
-                                    && uPlane.rightShift == 6
-                                    && vPlane.rightShift == 6
-                                    && yPlane.rowSampling == 1 && yPlane.colSampling == 1
-                                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
-                                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2
-                                    && yPlane.colInc == 2
-                                    && uPlane.colInc == 4
-                                    && vPlane.colInc == 4
-                                    && yPlane.rowInc == uPlane.rowInc
-                                    && yPlane.rowInc == vPlane.rowInc;
-                        }
-                        break;
-
-                    default: {
-                        // default to fully planar format --- this will be overridden if wrapping
-                        // TODO: keep interleaved format
-                        int32_t colInc = divUp(mAllocatedDepth, 8u);
-                        int32_t rowInc = stride * colInc / yPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->Y].mOffset = 0;
-                        mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
-                        mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
-                        mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
-                        int32_t offset = rowInc * vStride / yPlane.rowSampling;
-
-                        rowInc = stride * colInc / uPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->U].mOffset = offset;
-                        mediaImage->mPlane[mediaImage->U].mColInc = colInc;
-                        mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
-                        mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
-                        offset += rowInc * vStride / uPlane.rowSampling;
-
-                        rowInc = stride * colInc / vPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->V].mOffset = offset;
-                        mediaImage->mPlane[mediaImage->V].mColInc = colInc;
-                        mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
-                        mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
-                        mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
-                        break;
-                    }
-                }
-                break;
-            }
-
-            case C2PlanarLayout::TYPE_YUVA:
-                ALOGD("Converter: unrecognized color format "
-                        "(client %d component %d) for YUVA layout",
-                        mClientColorFormat, mComponentColorFormat);
-                mInitCheck = NO_INIT;
-                return;
-            case C2PlanarLayout::TYPE_RGB:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
-                // TODO: support MediaImage layout
-                switch (mClientColorFormat) {
-                    case COLOR_FormatSurface:
-                    case COLOR_FormatRGBFlexible:
-                    case COLOR_Format24bitBGR888:
-                    case COLOR_Format24bitRGB888:
-                        ALOGD("Converter: accept color format "
-                                "(client %d component %d) for RGB layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        break;
-                    default:
-                        ALOGD("Converter: unrecognized color format "
-                                "(client %d component %d) for RGB layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        mInitCheck = BAD_VALUE;
-                        return;
-                }
-                if (layout.numPlanes != 3) {
-                    ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                break;
-            case C2PlanarLayout::TYPE_RGBA:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
-                // TODO: support MediaImage layout
-                switch (mClientColorFormat) {
-                    case COLOR_FormatSurface:
-                    case COLOR_FormatRGBAFlexible:
-                    case COLOR_Format32bitABGR8888:
-                    case COLOR_Format32bitARGB8888:
-                    case COLOR_Format32bitBGRA8888:
-                        ALOGD("Converter: accept color format "
-                                "(client %d component %d) for RGBA layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        break;
-                    default:
-                        ALOGD("Converter: unrecognized color format "
-                                "(client %d component %d) for RGBA layout",
-                                mClientColorFormat, mComponentColorFormat);
-                        mInitCheck = BAD_VALUE;
-                        return;
-                }
-                if (layout.numPlanes != 4) {
-                    ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
-                    mInitCheck = BAD_VALUE;
-                    return;
-                }
-                break;
-            default:
-                mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
-                if (layout.numPlanes == 1) {
-                    const C2PlaneInfo &plane = layout.planes[0];
-                    if (plane.colInc < 0 || plane.rowInc < 0) {
-                        // Copy-only if we have negative colInc/rowInc
-                        tryWrapping = false;
-                    }
-                    mediaImage->mPlane[0].mOffset = 0;
-                    mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
-                    mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
-                    mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
-                    mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
-                } else {
-                    ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
-                            mClientColorFormat, mComponentColorFormat);
-                    mInitCheck = NO_INIT;
-                    return;
-                }
-                break;
-        }
-        if (tryWrapping) {
-            // try to map directly. check if the planes are near one another
-            const uint8_t *minPtr = mView.data()[0];
-            const uint8_t *maxPtr = mView.data()[0];
-            int32_t planeSize = 0;
-            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-                const C2PlaneInfo &plane = layout.planes[i];
-                int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
-                ssize_t minOffset = plane.minOffset(
-                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
-                ssize_t maxOffset = plane.maxOffset(
-                        mWidth / plane.colSampling, mHeight / plane.rowSampling);
-                if (minPtr > mView.data()[i] + minOffset) {
-                    minPtr = mView.data()[i] + minOffset;
-                }
-                if (maxPtr < mView.data()[i] + maxOffset) {
-                    maxPtr = mView.data()[i] + maxOffset;
-                }
-                planeSize += planeStride * divUp(mAllocatedDepth, 8u)
-                        * align(mHeight, 64) / plane.rowSampling;
-            }
-
-            if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
-                // FIXME: this is risky as reading/writing data out of bound results
-                //        in an undefined behavior, but gralloc does assume a
-                //        contiguous mapping
-                for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-                    const C2PlaneInfo &plane = layout.planes[i];
-                    mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
-                    mediaImage->mPlane[i].mColInc = plane.colInc;
-                    mediaImage->mPlane[i].mRowInc = plane.rowInc;
-                    mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
-                    mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
-                }
-                mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
-                ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
-            }
-        }
-        mediaImage->mNumPlanes = layout.numPlanes;
-        mediaImage->mWidth = view.crop().width;
-        mediaImage->mHeight = view.crop().height;
-        mediaImage->mBitDepth = bitDepth;
-        mediaImage->mBitDepthAllocated = mAllocatedDepth;
-
-        uint32_t bufferSize = 0;
-        for (uint32_t i = 0; i < layout.numPlanes; ++i) {
-            const C2PlaneInfo &plane = layout.planes[i];
-            if (plane.allocatedDepth < plane.bitDepth
-                    || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
-                ALOGD("rightShift value of %u unsupported", plane.rightShift);
-                mInitCheck = BAD_VALUE;
-                return;
-            }
-            if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
-                ALOGD("endianness value of %u unsupported", plane.endianness);
-                mInitCheck = BAD_VALUE;
-                return;
-            }
-            if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
-                ALOGD("different allocatedDepth/bitDepth per plane unsupported");
-                mInitCheck = BAD_VALUE;
-                return;
-            }
-            // stride is in bytes
-            bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
-        }
-
-        mBackBufferSize = bufferSize;
-        mInitCheck = OK;
-    }
-
-    status_t initCheck() const { return mInitCheck; }
-
-    uint32_t backBufferSize() const { return mBackBufferSize; }
-
-    /**
-     * Wrap C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not mapped
-     * in this function --- the caller should use CopyGraphicView2MediaImage() function to copy the
-     * data into a backing buffer explicitly.
-     *
-     * \return media buffer. This is null if wrapping failed.
-     */
-    sp<ABuffer> wrap() const {
-        if (mBackBuffer == nullptr) {
-            return mWrapped;
-        }
-        return nullptr;
-    }
-
-    bool setBackBuffer(const sp<ABuffer> &backBuffer) {
-        if (backBuffer == nullptr) {
-            return false;
-        }
-        if (backBuffer->capacity() < mBackBufferSize) {
-            return false;
-        }
-        backBuffer->setRange(0, mBackBufferSize);
-        mBackBuffer = backBuffer;
-        return true;
-    }
-
-    /**
-     * Copy C2GraphicView to MediaImage2.
-     */
-    status_t copyToMediaImage() {
-        ATRACE_CALL();
-        if (mInitCheck != OK) {
-            return mInitCheck;
-        }
-        return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
-    }
-
-    const sp<ABuffer> &imageData() const { return mMediaImage; }
-
-private:
-    status_t mInitCheck;
-
-    const C2GraphicView mView;
-    uint32_t mWidth;
-    uint32_t mHeight;
-    int32_t mClientColorFormat;  ///< SDK color format for MediaImage
-    int32_t mComponentColorFormat;  ///< SDK color format from component
-    sp<ABuffer> mWrapped;  ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
-    uint32_t mAllocatedDepth;
-    uint32_t mBackBufferSize;
-    sp<ABuffer> mMediaImage;
-    std::function<sp<ABuffer>(size_t)> mAlloc;
-
-    sp<ABuffer> mBackBuffer;    ///< backing buffer if we have to copy C2Buffer <=> ABuffer
-
-    MediaImage2 *getMediaImage() {
-        return (MediaImage2 *)mMediaImage->base();
-    }
-};
-
-}  // namespace
-
 // GraphicBlockBuffer
 
 // static
@@ -1210,6 +738,10 @@
         // Gralloc4 not supported; nothing to do
         return err;
     }
+    // Use V0 dataspaces for Gralloc4+
+    if (android::media::codec::provider_->dataspace_v0_partial()) {
+        ColorUtils::convertDataSpaceToV0(dataSpace);
+    }
     status_t status = mapper.setDataspace(buffer.get(), static_cast<ui::Dataspace>(dataSpace));
     if (status != OK) {
        err = C2_CORRUPTED;
diff --git a/media/codec2/sfplugin/Codec2Buffer.h b/media/codec2/sfplugin/Codec2Buffer.h
index 5e96921..8c5e909 100644
--- a/media/codec2/sfplugin/Codec2Buffer.h
+++ b/media/codec2/sfplugin/Codec2Buffer.h
@@ -44,28 +44,6 @@
 }  // namespace drm
 }  // namespace hardware
 
-/**
- * Copies a graphic view into a media image.
- *
- * \param imgBase base of MediaImage
- * \param img MediaImage data
- * \param view graphic view
- *
- * \return OK on success
- */
-status_t ImageCopy(uint8_t *imgBase, const MediaImage2 *img, const C2GraphicView &view);
-
-/**
- * Copies a media image into a graphic view.
- *
- * \param view graphic view
- * \param imgBase base of MediaImage
- * \param img MediaImage data
- *
- * \return OK on success
- */
-status_t ImageCopy(C2GraphicView &view, const uint8_t *imgBase, const MediaImage2 *img);
-
 class Codec2Buffer : public MediaCodecBuffer {
 public:
     using MediaCodecBuffer::MediaCodecBuffer;
diff --git a/media/codec2/sfplugin/InputSurfaceWrapper.h b/media/codec2/sfplugin/InputSurfaceWrapper.h
index 4bf6cd0..c158c5b 100644
--- a/media/codec2/sfplugin/InputSurfaceWrapper.h
+++ b/media/codec2/sfplugin/InputSurfaceWrapper.h
@@ -102,6 +102,7 @@
     }
 
     /**
+     * Notify that the input buffer reference is no longer needed.
      * Clean up C2Work related references if necessary. No-op by default.
      *
      * \param index index of input work.
@@ -109,6 +110,12 @@
     virtual void onInputBufferDone(c2_cntr64_t /* index */) {}
 
     /**
+     * Signal one input buffer as emptied.
+     * No-op by default.
+     */
+    virtual void onInputBufferEmptied() {}
+
+    /**
      * Returns dataspace information from GraphicBufferSource.
      */
     virtual android_dataspace getDataspace() { return mDataSpace; }
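Editor's note: the new onInputBufferEmptied() hook tells a wrapper that the codec has consumed one input buffer. A hypothetical illustration of how an implementation might use such a signal to throttle queuing is shown below; everything except the onInputBufferEmptied name is invented for this sketch.

#include <condition_variable>
#include <cstdint>
#include <mutex>

// Hypothetical illustration only: an input-surface wrapper could use the
// emptied signal to unblock its producer side once the codec has consumed a
// buffer. This class does not derive from InputSurfaceWrapper.
class ThrottlingInputSurface {
public:
    explicit ThrottlingInputSurface(int64_t maxInFlight) : mMaxInFlight(maxInFlight) {}

    // Called before handing one more buffer to the codec; blocks while too
    // many buffers are still in flight.
    void waitForSlotAndQueue() {
        std::unique_lock<std::mutex> lock(mMutex);
        mCondition.wait(lock, [this] { return mInFlight < mMaxInFlight; });
        ++mInFlight;
    }

    // Mirrors InputSurfaceWrapper::onInputBufferEmptied(): one input buffer
    // was emptied, so one more may be queued.
    void onInputBufferEmptied() {
        std::lock_guard<std::mutex> lock(mMutex);
        --mInFlight;
        mCondition.notify_one();
    }

private:
    const int64_t mMaxInFlight;
    int64_t mInFlight = 0;
    std::mutex mMutex;
    std::condition_variable mCondition;
};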
diff --git a/media/codec2/sfplugin/utils/Android.bp b/media/codec2/sfplugin/utils/Android.bp
index 54a6fb1..bed594c 100644
--- a/media/codec2/sfplugin/utils/Android.bp
+++ b/media/codec2/sfplugin/utils/Android.bp
@@ -54,7 +54,7 @@
 
     static_libs: [
         "libarect",
-        "libyuv_static",
+        "libyuv",
     ],
 
     sanitize: {
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
index 75e9bbc..574f1b9 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.cpp
@@ -27,7 +27,10 @@
 
 #include <android/hardware_buffer.h>
 #include <media/hardware/HardwareAPI.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
 #include <media/stagefright/foundation/AUtils.h>
+#include <media/stagefright/MediaCodecConstants.h>
 
 #include <C2Debug.h>
 
@@ -787,4 +790,438 @@
     return MemoryBlockPool().fetch(size);
 }
 
+GraphicView2MediaImageConverter::GraphicView2MediaImageConverter(
+        const C2GraphicView &view, const sp<AMessage> &format, bool copy)
+    : mInitCheck(NO_INIT),
+        mView(view),
+        mWidth(view.width()),
+        mHeight(view.height()),
+        mAllocatedDepth(0),
+        mBackBufferSize(0),
+        mMediaImage(new ABuffer(sizeof(MediaImage2))) {
+    ATRACE_CALL();
+    if (!format->findInt32(KEY_COLOR_FORMAT, &mClientColorFormat)) {
+        mClientColorFormat = COLOR_FormatYUV420Flexible;
+    }
+    if (!format->findInt32("android._color-format", &mComponentColorFormat)) {
+        mComponentColorFormat = COLOR_FormatYUV420Flexible;
+    }
+    if (view.error() != C2_OK) {
+        ALOGD("Converter: view.error() = %d", view.error());
+        mInitCheck = BAD_VALUE;
+        return;
+    }
+    MediaImage2 *mediaImage = (MediaImage2 *)mMediaImage->base();
+    const C2PlanarLayout &layout = view.layout();
+    if (layout.numPlanes == 0) {
+        ALOGD("Converter: 0 planes");
+        mInitCheck = BAD_VALUE;
+        return;
+    }
+    memset(mediaImage, 0, sizeof(*mediaImage));
+    mAllocatedDepth = layout.planes[0].allocatedDepth;
+    uint32_t bitDepth = layout.planes[0].bitDepth;
+
+    // align width and height to support subsampling cleanly
+    uint32_t stride = align(view.crop().width, 2) * divUp(layout.planes[0].allocatedDepth, 8u);
+    uint32_t vStride = align(view.crop().height, 2);
+
+    bool tryWrapping = !copy;
+
+    switch (layout.type) {
+        case C2PlanarLayout::TYPE_YUV: {
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
+            if (layout.numPlanes != 3) {
+                ALOGD("Converter: %d planes for YUV layout", layout.numPlanes);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            std::optional<int> clientBitDepth = {};
+            switch (mClientColorFormat) {
+                case COLOR_FormatYUVP010:
+                    clientBitDepth = 10;
+                    break;
+                case COLOR_FormatYUV411PackedPlanar:
+                case COLOR_FormatYUV411Planar:
+                case COLOR_FormatYUV420Flexible:
+                case COLOR_FormatYUV420PackedPlanar:
+                case COLOR_FormatYUV420PackedSemiPlanar:
+                case COLOR_FormatYUV420Planar:
+                case COLOR_FormatYUV420SemiPlanar:
+                case COLOR_FormatYUV422Flexible:
+                case COLOR_FormatYUV422PackedPlanar:
+                case COLOR_FormatYUV422PackedSemiPlanar:
+                case COLOR_FormatYUV422Planar:
+                case COLOR_FormatYUV422SemiPlanar:
+                case COLOR_FormatYUV444Flexible:
+                case COLOR_FormatYUV444Interleaved:
+                    clientBitDepth = 8;
+                    break;
+                default:
+                    // no-op; used with optional
+                    break;
+
+            }
+            // conversion fails if the client bit-depth and the component bit-depth differ
+            if ((clientBitDepth) && (bitDepth != clientBitDepth.value())) {
+                ALOGD("Bit depth of client: %d and component: %d differs",
+                    *clientBitDepth, bitDepth);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            C2PlaneInfo yPlane = layout.planes[C2PlanarLayout::PLANE_Y];
+            C2PlaneInfo uPlane = layout.planes[C2PlanarLayout::PLANE_U];
+            C2PlaneInfo vPlane = layout.planes[C2PlanarLayout::PLANE_V];
+            if (yPlane.channel != C2PlaneInfo::CHANNEL_Y
+                    || uPlane.channel != C2PlaneInfo::CHANNEL_CB
+                    || vPlane.channel != C2PlaneInfo::CHANNEL_CR) {
+                ALOGD("Converter: not YUV layout");
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            bool yuv420888 = yPlane.rowSampling == 1 && yPlane.colSampling == 1
+                    && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+                    && vPlane.rowSampling == 2 && vPlane.colSampling == 2;
+            if (yuv420888) {
+                for (uint32_t i = 0; i < 3; ++i) {
+                    const C2PlaneInfo &plane = layout.planes[i];
+                    if (plane.allocatedDepth != 8 || plane.bitDepth != 8) {
+                        yuv420888 = false;
+                        break;
+                    }
+                }
+                yuv420888 = yuv420888 && yPlane.colInc == 1 && uPlane.rowInc == vPlane.rowInc;
+            }
+            int32_t copyFormat = mClientColorFormat;
+            if (yuv420888 && mClientColorFormat == COLOR_FormatYUV420Flexible) {
+                if (uPlane.colInc == 2 && vPlane.colInc == 2
+                        && yPlane.rowInc == uPlane.rowInc) {
+                    copyFormat = COLOR_FormatYUV420PackedSemiPlanar;
+                } else if (uPlane.colInc == 1 && vPlane.colInc == 1
+                        && yPlane.rowInc == uPlane.rowInc * 2) {
+                    copyFormat = COLOR_FormatYUV420PackedPlanar;
+                }
+            }
+            ALOGV("client_fmt=0x%x y:{colInc=%d rowInc=%d} u:{colInc=%d rowInc=%d} "
+                    "v:{colInc=%d rowInc=%d}",
+                    mClientColorFormat,
+                    yPlane.colInc, yPlane.rowInc,
+                    uPlane.colInc, uPlane.rowInc,
+                    vPlane.colInc, vPlane.rowInc);
+            switch (copyFormat) {
+                case COLOR_FormatYUV420Flexible:
+                case COLOR_FormatYUV420Planar:
+                case COLOR_FormatYUV420PackedPlanar:
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                    mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+                    mediaImage->mPlane[mediaImage->U].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = stride / 2;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                    mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride * 5 / 4;
+                    mediaImage->mPlane[mediaImage->V].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = stride / 2;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+                    if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+                        tryWrapping = yuv420888 && uPlane.colInc == 1 && vPlane.colInc == 1
+                                && yPlane.rowInc == uPlane.rowInc * 2
+                                && view.data()[0] < view.data()[1]
+                                && view.data()[1] < view.data()[2];
+                    }
+                    break;
+
+                case COLOR_FormatYUV420SemiPlanar:
+                case COLOR_FormatYUV420PackedSemiPlanar:
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = 1;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                    mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+                    mediaImage->mPlane[mediaImage->U].mColInc = 2;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                    mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 1;
+                    mediaImage->mPlane[mediaImage->V].mColInc = 2;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+
+                    if (tryWrapping && mClientColorFormat != COLOR_FormatYUV420Flexible) {
+                        tryWrapping = yuv420888 && uPlane.colInc == 2 && vPlane.colInc == 2
+                                && yPlane.rowInc == uPlane.rowInc
+                                && view.data()[0] < view.data()[1]
+                                && view.data()[1] < view.data()[2];
+                    }
+                    break;
+
+                case COLOR_FormatYUVP010:
+                    // stride is in bytes
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = 2;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = 1;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = 1;
+
+                    mediaImage->mPlane[mediaImage->U].mOffset = stride * vStride;
+                    mediaImage->mPlane[mediaImage->U].mColInc = 4;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = 2;
+
+                    mediaImage->mPlane[mediaImage->V].mOffset = stride * vStride + 2;
+                    mediaImage->mPlane[mediaImage->V].mColInc = 4;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = stride;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = 2;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = 2;
+                    if (tryWrapping) {
+                        tryWrapping = yPlane.allocatedDepth == 16
+                                && uPlane.allocatedDepth == 16
+                                && vPlane.allocatedDepth == 16
+                                && yPlane.bitDepth == 10
+                                && uPlane.bitDepth == 10
+                                && vPlane.bitDepth == 10
+                                && yPlane.rightShift == 6
+                                && uPlane.rightShift == 6
+                                && vPlane.rightShift == 6
+                                && yPlane.rowSampling == 1 && yPlane.colSampling == 1
+                                && uPlane.rowSampling == 2 && uPlane.colSampling == 2
+                                && vPlane.rowSampling == 2 && vPlane.colSampling == 2
+                                && yPlane.colInc == 2
+                                && uPlane.colInc == 4
+                                && vPlane.colInc == 4
+                                && yPlane.rowInc == uPlane.rowInc
+                                && yPlane.rowInc == vPlane.rowInc;
+                    }
+                    break;
+
+                default: {
+                    // default to fully planar format --- this will be overridden if wrapping
+                    // TODO: keep interleaved format
+                    int32_t colInc = divUp(mAllocatedDepth, 8u);
+                    int32_t rowInc = stride * colInc / yPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->Y].mOffset = 0;
+                    mediaImage->mPlane[mediaImage->Y].mColInc = colInc;
+                    mediaImage->mPlane[mediaImage->Y].mRowInc = rowInc;
+                    mediaImage->mPlane[mediaImage->Y].mHorizSubsampling = yPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->Y].mVertSubsampling = yPlane.rowSampling;
+                    int32_t offset = rowInc * vStride / yPlane.rowSampling;
+
+                    rowInc = stride * colInc / uPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->U].mOffset = offset;
+                    mediaImage->mPlane[mediaImage->U].mColInc = colInc;
+                    mediaImage->mPlane[mediaImage->U].mRowInc = rowInc;
+                    mediaImage->mPlane[mediaImage->U].mHorizSubsampling = uPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->U].mVertSubsampling = uPlane.rowSampling;
+                    offset += rowInc * vStride / uPlane.rowSampling;
+
+                    rowInc = stride * colInc / vPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->V].mOffset = offset;
+                    mediaImage->mPlane[mediaImage->V].mColInc = colInc;
+                    mediaImage->mPlane[mediaImage->V].mRowInc = rowInc;
+                    mediaImage->mPlane[mediaImage->V].mHorizSubsampling = vPlane.colSampling;
+                    mediaImage->mPlane[mediaImage->V].mVertSubsampling = vPlane.rowSampling;
+                    break;
+                }
+            }
+            break;
+        }
+
+        case C2PlanarLayout::TYPE_YUVA:
+            ALOGD("Converter: unrecognized color format "
+                    "(client %d component %d) for YUVA layout",
+                    mClientColorFormat, mComponentColorFormat);
+            mInitCheck = NO_INIT;
+            return;
+        case C2PlanarLayout::TYPE_RGB:
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGB;
+            // TODO: support MediaImage layout
+            switch (mClientColorFormat) {
+                case COLOR_FormatSurface:
+                case COLOR_FormatRGBFlexible:
+                case COLOR_Format24bitBGR888:
+                case COLOR_Format24bitRGB888:
+                    ALOGD("Converter: accept color format "
+                            "(client %d component %d) for RGB layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    break;
+                default:
+                    ALOGD("Converter: unrecognized color format "
+                            "(client %d component %d) for RGB layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    mInitCheck = BAD_VALUE;
+                    return;
+            }
+            if (layout.numPlanes != 3) {
+                ALOGD("Converter: %d planes for RGB layout", layout.numPlanes);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            break;
+        case C2PlanarLayout::TYPE_RGBA:
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_RGBA;
+            // TODO: support MediaImage layout
+            switch (mClientColorFormat) {
+                case COLOR_FormatSurface:
+                case COLOR_FormatRGBAFlexible:
+                case COLOR_Format32bitABGR8888:
+                case COLOR_Format32bitARGB8888:
+                case COLOR_Format32bitBGRA8888:
+                    ALOGD("Converter: accept color format "
+                            "(client %d component %d) for RGBA layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    break;
+                default:
+                    ALOGD("Converter: unrecognized color format "
+                            "(client %d component %d) for RGBA layout",
+                            mClientColorFormat, mComponentColorFormat);
+                    mInitCheck = BAD_VALUE;
+                    return;
+            }
+            if (layout.numPlanes != 4) {
+                ALOGD("Converter: %d planes for RGBA layout", layout.numPlanes);
+                mInitCheck = BAD_VALUE;
+                return;
+            }
+            break;
+        default:
+            mediaImage->mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
+            if (layout.numPlanes == 1) {
+                const C2PlaneInfo &plane = layout.planes[0];
+                if (plane.colInc < 0 || plane.rowInc < 0) {
+                    // Copy-only if we have negative colInc/rowInc
+                    tryWrapping = false;
+                }
+                mediaImage->mPlane[0].mOffset = 0;
+                mediaImage->mPlane[0].mColInc = std::abs(plane.colInc);
+                mediaImage->mPlane[0].mRowInc = std::abs(plane.rowInc);
+                mediaImage->mPlane[0].mHorizSubsampling = plane.colSampling;
+                mediaImage->mPlane[0].mVertSubsampling = plane.rowSampling;
+            } else {
+                ALOGD("Converter: unrecognized layout: color format (client %d component %d)",
+                        mClientColorFormat, mComponentColorFormat);
+                mInitCheck = NO_INIT;
+                return;
+            }
+            break;
+    }
+    if (tryWrapping) {
+        // try to map directly. check if the planes are near one another
+        const uint8_t *minPtr = mView.data()[0];
+        const uint8_t *maxPtr = mView.data()[0];
+        int32_t planeSize = 0;
+        for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+            const C2PlaneInfo &plane = layout.planes[i];
+            int64_t planeStride = std::abs(plane.rowInc / plane.colInc);
+            ssize_t minOffset = plane.minOffset(
+                    mWidth / plane.colSampling, mHeight / plane.rowSampling);
+            ssize_t maxOffset = plane.maxOffset(
+                    mWidth / plane.colSampling, mHeight / plane.rowSampling);
+            if (minPtr > mView.data()[i] + minOffset) {
+                minPtr = mView.data()[i] + minOffset;
+            }
+            if (maxPtr < mView.data()[i] + maxOffset) {
+                maxPtr = mView.data()[i] + maxOffset;
+            }
+            planeSize += planeStride * divUp(mAllocatedDepth, 8u)
+                    * align(mHeight, 64) / plane.rowSampling;
+        }
+
+        if (minPtr == mView.data()[0] && (maxPtr - minPtr) <= planeSize) {
+            // FIXME: this is risky as reading/writing data out of bounds
+            //        results in undefined behavior, but gralloc does assume a
+            //        contiguous mapping
+            for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+                const C2PlaneInfo &plane = layout.planes[i];
+                mediaImage->mPlane[i].mOffset = mView.data()[i] - minPtr;
+                mediaImage->mPlane[i].mColInc = plane.colInc;
+                mediaImage->mPlane[i].mRowInc = plane.rowInc;
+                mediaImage->mPlane[i].mHorizSubsampling = plane.colSampling;
+                mediaImage->mPlane[i].mVertSubsampling = plane.rowSampling;
+            }
+            mWrapped = new ABuffer(const_cast<uint8_t *>(minPtr), maxPtr - minPtr);
+            ALOGV("Converter: wrapped (capacity=%zu)", mWrapped->capacity());
+        }
+    }
+    mediaImage->mNumPlanes = layout.numPlanes;
+    mediaImage->mWidth = view.crop().width;
+    mediaImage->mHeight = view.crop().height;
+    mediaImage->mBitDepth = bitDepth;
+    mediaImage->mBitDepthAllocated = mAllocatedDepth;
+
+    uint32_t bufferSize = 0;
+    for (uint32_t i = 0; i < layout.numPlanes; ++i) {
+        const C2PlaneInfo &plane = layout.planes[i];
+        if (plane.allocatedDepth < plane.bitDepth
+                || plane.rightShift != plane.allocatedDepth - plane.bitDepth) {
+            ALOGD("rightShift value of %u unsupported", plane.rightShift);
+            mInitCheck = BAD_VALUE;
+            return;
+        }
+        if (plane.allocatedDepth > 8 && plane.endianness != C2PlaneInfo::NATIVE) {
+            ALOGD("endianness value of %u unsupported", plane.endianness);
+            mInitCheck = BAD_VALUE;
+            return;
+        }
+        if (plane.allocatedDepth != mAllocatedDepth || plane.bitDepth != bitDepth) {
+            ALOGD("different allocatedDepth/bitDepth per plane unsupported");
+            mInitCheck = BAD_VALUE;
+            return;
+        }
+        // stride is in bytes
+        bufferSize += stride * vStride / plane.rowSampling / plane.colSampling;
+    }
+
+    mBackBufferSize = bufferSize;
+    mInitCheck = OK;
+}
+
+status_t GraphicView2MediaImageConverter::initCheck() const { return mInitCheck; }
+
+uint32_t GraphicView2MediaImageConverter::backBufferSize() const { return mBackBufferSize; }
+
+sp<ABuffer> GraphicView2MediaImageConverter::wrap() const {
+    if (mBackBuffer == nullptr) {
+        return mWrapped;
+    }
+    return nullptr;
+}
+
+bool GraphicView2MediaImageConverter::setBackBuffer(const sp<ABuffer> &backBuffer) {
+    if (backBuffer == nullptr) {
+        return false;
+    }
+    if (backBuffer->capacity() < mBackBufferSize) {
+        return false;
+    }
+    backBuffer->setRange(0, mBackBufferSize);
+    mBackBuffer = backBuffer;
+    return true;
+}
+
+status_t GraphicView2MediaImageConverter::copyToMediaImage() {
+    ATRACE_CALL();
+    if (mInitCheck != OK) {
+        return mInitCheck;
+    }
+    return ImageCopy(mBackBuffer->base(), getMediaImage(), mView);
+}
+
+const sp<ABuffer> &GraphicView2MediaImageConverter::imageData() const { return mMediaImage; }
+
+MediaImage2 *GraphicView2MediaImageConverter::getMediaImage() {
+    return (MediaImage2 *)mMediaImage->base();
+}
+
 }  // namespace android
diff --git a/media/codec2/sfplugin/utils/Codec2BufferUtils.h b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
index 6b0ba7f..8daf3d8 100644
--- a/media/codec2/sfplugin/utils/Codec2BufferUtils.h
+++ b/media/codec2/sfplugin/utils/Codec2BufferUtils.h
@@ -22,6 +22,7 @@
 #include <C2ParamDef.h>
 
 #include <media/hardware/VideoAPI.h>
+#include <utils/StrongPointer.h>
 #include <utils/Errors.h>
 
 namespace android {
@@ -194,6 +195,61 @@
     std::shared_ptr<Impl> mImpl;
 };
 
+struct ABuffer;
+struct AMessage;
+
+class GraphicView2MediaImageConverter {
+public:
+    /**
+     * Creates a C2GraphicView <=> MediaImage converter
+     *
+     * \param view C2GraphicView object
+     * \param format buffer format
+     * \param copy whether the converter is used for copy or not
+     */
+    GraphicView2MediaImageConverter(
+            const C2GraphicView &view, const sp<AMessage> &format, bool copy);
+
+    status_t initCheck() const;
+
+    uint32_t backBufferSize() const;
+
+    /**
+     * Wrap C2GraphicView using a MediaImage2. Note that if not wrapped, the content is not mapped
+     * in this function --- the caller should use the CopyGraphicView2MediaImage() function to copy the
+     * data into a backing buffer explicitly.
+     *
+     * \return media buffer. This is null if wrapping failed.
+     */
+    sp<ABuffer> wrap() const;
+
+    bool setBackBuffer(const sp<ABuffer> &backBuffer);
+
+    /**
+     * Copy C2GraphicView to MediaImage2.
+     */
+    status_t copyToMediaImage();
+
+    const sp<ABuffer> &imageData() const;
+
+private:
+    status_t mInitCheck;
+
+    const C2GraphicView mView;
+    uint32_t mWidth;
+    uint32_t mHeight;
+    int32_t mClientColorFormat;  ///< SDK color format for MediaImage
+    int32_t mComponentColorFormat;  ///< SDK color format from component
+    sp<ABuffer> mWrapped;  ///< wrapped buffer (if we can map C2Buffer to an ABuffer)
+    uint32_t mAllocatedDepth;
+    uint32_t mBackBufferSize;
+    sp<ABuffer> mMediaImage;
+
+    sp<ABuffer> mBackBuffer;    ///< backing buffer if we have to copy C2Buffer <=> ABuffer
+
+    MediaImage2 *getMediaImage();
+};
+
 } // namespace android
 
 #endif  // CODEC2_BUFFER_UTILS_H_
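Editor's note: GraphicView2MediaImageConverter is now part of the shared Codec2BufferUtils API instead of an anonymous helper inside Codec2Buffer.cpp. A hedged sketch of the typical call sequence, based only on the declaration above, follows; the surrounding function and variable names are assumptions and error handling is abbreviated.

#include <C2Buffer.h>

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>

#include "Codec2BufferUtils.h"

namespace android {

// Sketch: copy a mapped C2GraphicView into a byte buffer described by the
// converter's MediaImage2 (available via imageData()). Returns nullptr on
// failure.
sp<ABuffer> convertViewToImage(const C2GraphicView &view, const sp<AMessage> &format) {
    GraphicView2MediaImageConverter converter(view, format, true /* copy */);
    if (converter.initCheck() != OK) {
        return nullptr;
    }
    // Allocate a backing buffer of the size the converter requires, attach it,
    // then let the converter copy the planes into it.
    sp<ABuffer> backBuffer = new ABuffer(converter.backBufferSize());
    if (!converter.setBackBuffer(backBuffer)) {
        return nullptr;
    }
    if (converter.copyToMediaImage() != OK) {
        return nullptr;
    }
    return backBuffer;
}

}  // namespace android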
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index 77a76e8..7a33af4 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -32,10 +32,15 @@
 namespace android {
 
 
-static bool isAtLeast(int version, const char *codeName) {
-    char deviceCodeName[PROP_VALUE_MAX];
-    __system_property_get("ro.build.version.codename", deviceCodeName);
-    return android_get_device_api_level() >= version || !strcmp(deviceCodeName, codeName);
+static bool isAtLeast(int version, const std::string codeName) {
+    static std::once_flag sCheckOnce;
+    static std::string sDeviceCodeName;
+    static int sDeviceApiLevel;
+    std::call_once(sCheckOnce, [&](){
+        sDeviceCodeName = base::GetProperty("ro.build.version.codename", "");
+        sDeviceApiLevel = android_get_device_api_level();
+    });
+    return sDeviceApiLevel >= version || sDeviceCodeName == codeName;
 }
 
 bool isAtLeastT() {
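Editor's note: the rewritten isAtLeast() caches the device codename and API level once instead of re-reading the system property on every call. The same std::call_once pattern in isolation is sketched below; the property key and the android-base/NDK helpers are the ones already used in the hunk above, while the function name is invented for the sketch.

#include <mutex>
#include <string>

#include <android-base/properties.h>
#include <android/api-level.h>

// Sketch of the caching pattern used above: read the values once, then reuse
// them on every subsequent call without touching the property system again.
static bool isAtLeastSketch(int version, const std::string &codeName) {
    static std::once_flag sCheckOnce;
    static std::string sDeviceCodeName;
    static int sDeviceApiLevel;
    std::call_once(sCheckOnce, [] {
        sDeviceCodeName = android::base::GetProperty("ro.build.version.codename", "");
        sDeviceApiLevel = android_get_device_api_level();
    });
    return sDeviceApiLevel >= version || sDeviceCodeName == codeName;
}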
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index 02c356c..7d671a7 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -45,6 +45,11 @@
         "C2SampleComponent_test.cpp",
         "C2UtilTest.cpp",
         "vndk/C2BufferTest.cpp",
+        "vndk/C2FenceTest.cpp",
+    ],
+
+    static_libs: [
+        "libgmock",
     ],
 
     shared_libs: [
@@ -52,6 +57,7 @@
         "libcodec2_vndk",
         "libcutils",
         "liblog",
+        "libui",
         "libutils",
     ],
 
diff --git a/media/codec2/tests/vndk/C2FenceTest.cpp b/media/codec2/tests/vndk/C2FenceTest.cpp
new file mode 100644
index 0000000..9292381
--- /dev/null
+++ b/media/codec2/tests/vndk/C2FenceTest.cpp
@@ -0,0 +1,455 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <C2Buffer.h>
+#include <C2FenceFactory.h>
+
+#include <unistd.h>
+
+#include <android-base/unique_fd.h>
+#include <linux/kcmp.h>       /* Definition of KCMP_* constants */
+#include <sys/mman.h>
+#include <sys/syscall.h>      /* Definition of SYS_* constants */
+#include <ui/Fence.h>
+
+namespace android {
+
+static int fd_kcmp(int fd1, int fd2) {
+    static pid_t pid = getpid();
+
+    return syscall(SYS_kcmp, pid, pid, KCMP_FILE, fd1, fd2);
+}
+
+// matcher to check if value (arg) and fd refers to the same file
+MATCHER_P(RefersToTheSameFile, fd, "") {
+    return fd_kcmp(fd, arg) == 0;
+}
+
+// matcher to check if value (arg) is a dup of an fd
+MATCHER_P(IsDupOf, fd, "") {
+    return (ExplainMatchResult(::testing::Ne(-1), arg, result_listener) &&
+            ExplainMatchResult(::testing::Ne(fd), arg, result_listener) &&
+            ExplainMatchResult(RefersToTheSameFile(fd), arg, result_listener));
+}
+
+class C2FenceTest : public ::testing::Test {
+public:
+    C2FenceTest() = default;
+
+    ~C2FenceTest() = default;
+
+
+protected:
+    enum : int32_t {
+        SYNC_FENCE_DEPRECATED_MAGIC     = 3,
+        SYNC_FENCE_UNORDERED_MAGIC      = '\302fsu',
+        SYNC_FENCE_MAGIC                = '\302fso',
+    };
+
+    // Validate a null fence
+    void validateNullFence(const C2Fence &fence);
+
+    // Validate a single fd sync fence
+    void validateSingleFdFence(const C2Fence &fence, int fd);
+
+    // Validate a two fd unordered sync fence
+    void validateTwoFdUnorderedFence(const C2Fence &fence, int fd1, int fd2, int mergeFd);
+
+    // Validate a three fd sync fence
+    void validateThreeFdFence(const C2Fence &fence, int fd1, int fd2, int fd3);
+};
+
+TEST_F(C2FenceTest, IsDupOf_sanity) {
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    EXPECT_THAT(fd1, ::testing::Not(IsDupOf(fd2)));
+    EXPECT_THAT(-1, ::testing::Not(IsDupOf(fd2)));
+    EXPECT_THAT(-1, ::testing::Not(IsDupOf(-1)));
+    EXPECT_THAT(fd3, ::testing::Not(IsDupOf(fd3)));
+
+    int fd4 = dup(fd3);
+    EXPECT_THAT(fd4, IsDupOf(fd3));
+    EXPECT_THAT(fd3, IsDupOf(fd4));
+
+    close(fd1);
+    close(fd2);
+    close(fd3);
+    close(fd4);
+}
+
+TEST_F(C2FenceTest, NullFence) {
+    validateNullFence(C2Fence());
+}
+
+void C2FenceTest::validateNullFence(const C2Fence &fence) {
+    // Verify that the fence is valid.
+    EXPECT_TRUE(fence.valid());
+    EXPECT_TRUE(fence.ready());
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_EQ(fenceFd.get(), -1);
+    EXPECT_FALSE(fence.isHW()); // perhaps this should be false for a null fence
+
+    // A null fence has no fds
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::IsEmpty());
+    for (int fd : fds) {
+        close(fd);
+    }
+
+    // A null fence has no native handle
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::IsNull());
+    if (handle) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, SyncFence_with_negative_fd) {
+    // Create a SyncFence with a negative fd.
+    C2Fence fence = _C2FenceFactory::CreateSyncFence(-1, false /* validate */);
+
+    validateNullFence(fence);
+}
+
+TEST_F(C2FenceTest, SyncFence_with_valid_fd) {
+    // Create a SyncFence with a valid fd. We cannot create an actual sync fd,
+    // so we cannot test wait(), but we can verify the ABI APIs
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateSyncFence(fd, false /* validate */);
+    validateSingleFdFence(fence, fd);
+}
+
+void C2FenceTest::validateSingleFdFence(const C2Fence &fence, int fd) {
+    // EXPECT_TRUE(fence.valid()); // need a valid sync fd to test this
+    // EXPECT_TRUE(fence.ready());
+    // Verify that the fence says it is a HW sync fence.
+    EXPECT_TRUE(fence.isHW()); // FIXME this may be an implementation detail
+
+    // Verify that the fd returned is a duped version of the initial fd
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_THAT(fenceFd.get(), IsDupOf(fd));
+
+    // Verify that fds returns a duped version of the initial fd
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::SizeIs(1));
+    EXPECT_THAT(fds, ::testing::ElementsAre(IsDupOf(fd)));
+    for (int fd_i : fds) {
+        close(fd_i);
+    }
+
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::NotNull());
+    if (handle) {
+        EXPECT_EQ(handle->numFds, 1);
+        EXPECT_EQ(handle->numInts, 1);
+        EXPECT_THAT(handle->data[0], IsDupOf(fd));
+        EXPECT_EQ(handle->data[1], SYNC_FENCE_MAGIC);
+
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_one_valid_test_fd) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { -1, fd, -1 }, &status);
+    // if we only have one valid fd, we are not merging fences, so the test fd is not validated
+    EXPECT_EQ(status, C2_OK);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_one_valid_test_fd_null_status) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { -1, fd, -1 });
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_merge_failure) {
+    // Create a multi SyncFence with multiple non-sync-fence fds. This should
+    // result in a fence being created, but also an error.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { fd1, fd2, fd3 }, &status);
+    EXPECT_EQ(status, C2_CORRUPTED);
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_merge_failure_null_status) {
+    // Create a multi SyncFence with multiple non-sync-fence fds. This should
+    // result in a fence being created, but also an error.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { fd1, fd2, fd3 });
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_multiple_fds) {
+    // We cannot create a true unordered multi sync fence as we can only
+    // create test fds and those cannot be merged. As such, we cannot
+    // test the factory method CreateUnorderedMultiSyncFence. We can however
+    // create a test fence from a constructed native handle.
+
+    // Technically, we need 3 fds: if we ended up with only 2, we wouldn't
+    // actually need the 2nd (final fence) fd since it would be equivalent to
+    // the first. In fact we will generate (and have always generated) a
+    // single fd fence in that case.
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int mergeFd = memfd_create("test3", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(3 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd1;
+    handle->data[1] = fd2;
+    handle->data[2] = mergeFd;
+    handle->data[3] = SYNC_FENCE_UNORDERED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateTwoFdUnorderedFence(fence, fd1, fd2, mergeFd);
+}
+
+void C2FenceTest::validateTwoFdUnorderedFence(
+        const C2Fence &fence, int fd1, int fd2, int mergeFd) {
+    // EXPECT_TRUE(fence.valid()); // need a valid sync fd to test this
+    // EXPECT_TRUE(fence.ready());
+    // Verify that the fence says it is a HW sync fence.
+    EXPECT_TRUE(fence.isHW()); // FIXME this may be an implementation detail
+
+    // Verify that the fd returned is a duped version of the merge fd
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_THAT(fenceFd.get(), IsDupOf(mergeFd));
+
+    // Verify that fds returns duped versions of the initial fds (but not the merge fd)
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::SizeIs(2));
+    EXPECT_THAT(fds, ::testing::ElementsAre(IsDupOf(fd1), IsDupOf(fd2)));
+    for (int fd_i : fds) {
+        close(fd_i);
+    }
+
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::NotNull());
+    if (handle) {
+        EXPECT_EQ(handle->numFds, 3);
+        EXPECT_EQ(handle->numInts, 1);
+        EXPECT_THAT(handle->data[0], IsDupOf(fd1));
+        EXPECT_THAT(handle->data[1], IsDupOf(fd2));
+        EXPECT_THAT(handle->data[2], IsDupOf(mergeFd));
+        EXPECT_EQ(handle->data[3], SYNC_FENCE_UNORDERED_MAGIC);
+
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, MultiSyncFence_with_one_valid_test_fd) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateMultiSyncFence(
+        { -1, fd, -1 }, &status);
+    // if we only have one valid fd, we are not merging fences, so the test fds are not validated
+    EXPECT_EQ(status, C2_OK);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, MultiSyncFence_with_one_valid_test_fd_null_status) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateMultiSyncFence(
+        { -1, fd, -1 });
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, MultiSyncFence_with_multiple_fds) {
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateMultiSyncFence(
+        { fd1, fd2, fd3 }, &status);
+    // test fds are not validated
+    EXPECT_EQ(status, C2_OK);
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+void C2FenceTest::validateThreeFdFence(const C2Fence &fence, int fd1, int fd2, int fd3) {
+    // EXPECT_TRUE(fence.valid()); // need a valid sync fd to test this
+    // EXPECT_TRUE(fence.ready());
+    // Verify that the fence says it is a HW sync fence.
+    EXPECT_TRUE(fence.isHW()); // FIXME this may be an implementation detail
+
+    // Verify that the fd returned is a duped version of the final fd
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_THAT(fenceFd.get(), IsDupOf(fd3));
+
+    // Verify that fds returns duped versions of all 3 initial fds
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::SizeIs(3));
+    EXPECT_THAT(fds, ::testing::ElementsAre(IsDupOf(fd1), IsDupOf(fd2), IsDupOf(fd3)));
+    for (int fd_i : fds) {
+        close(fd_i);
+    }
+
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::NotNull());
+    if (handle) {
+        EXPECT_EQ(handle->numFds, 3);
+        EXPECT_EQ(handle->numInts, 1);
+        EXPECT_THAT(handle->data[0], IsDupOf(fd1));
+        EXPECT_THAT(handle->data[1], IsDupOf(fd2));
+        EXPECT_THAT(handle->data[2], IsDupOf(fd3));
+        EXPECT_EQ(handle->data[3], SYNC_FENCE_MAGIC);
+
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, BackwardCompat_UDC_sync_fence) {
+    // Create a single SyncFence from a UDC native handle
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(1 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd;
+    handle->data[1] = SYNC_FENCE_DEPRECATED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, BackwardCompat_24Q1_single_fd_fence) {
+    // Create a single SyncFence from a 24Q1 native handle
+    // This had the same (albeit separately duped) fd twice, and used the legacy
+    // magic number.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(2 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd;
+    handle->data[1] = dup(fd);
+    handle->data[2] = SYNC_FENCE_DEPRECATED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, BackwardCompat_24Q3_single_fd_fence) {
+    // Create a single SyncFence from the defined native handle
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(1 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd;
+    handle->data[1] = SYNC_FENCE_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, BackwardCompat_24Q1_multi_fd_fence) {
+    // Create an unordered multi-fd SyncFence from a 24Q1 era native handle
+    // with the legacy magic number.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int mergeFd = memfd_create("test3", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(3 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd1;
+    handle->data[1] = fd2;
+    handle->data[2] = mergeFd;
+    handle->data[3] = SYNC_FENCE_DEPRECATED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateTwoFdUnorderedFence(fence, fd1, fd2, mergeFd);
+}
+
+// No need to create BackwardCompat_24Q3_unordered_multi_fd_fence because
+// we are creating that fence already from the 24Q3 native handle layout
+// in the UnorderedMultiSyncFence_with_multiple_fds test.
+
+TEST_F(C2FenceTest, BackwardCompat_24Q3_multi_fd_fence) {
+    // Create an ordered multi-fd SyncFence from a 24Q3 era native handle
+    // with the current magic number.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(3 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd1;
+    handle->data[1] = fd2;
+    handle->data[2] = fd3;
+    handle->data[3] = SYNC_FENCE_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+} // namespace android
diff --git a/media/codec2/vndk/Android.bp b/media/codec2/vndk/Android.bp
index 9f57bfd..dc06ee6 100644
--- a/media/codec2/vndk/Android.bp
+++ b/media/codec2/vndk/Android.bp
@@ -53,6 +53,7 @@
     ],
 
     defaults: [
+        "aconfig_lib_cc_static_link.defaults",
         "libcodec2_hal_selection",
     ],
 
diff --git a/media/codec2/vndk/C2Buffer.cpp b/media/codec2/vndk/C2Buffer.cpp
index 7b9b80d..bff953d 100644
--- a/media/codec2/vndk/C2Buffer.cpp
+++ b/media/codec2/vndk/C2Buffer.cpp
@@ -1311,8 +1311,7 @@
                 for (size_t planeIx = 0; planeIx < mLayout.numPlanes; ++planeIx) {
                     const uint32_t colSampling = mLayout.planes[planeIx].colSampling;
                     const uint32_t rowSampling = mLayout.planes[planeIx].rowSampling;
-                    if (crop.left % colSampling || crop.right() % colSampling
-                            || crop.top % rowSampling || crop.bottom() % rowSampling) {
+                    if (crop.left % colSampling || crop.top % rowSampling) {
                         // cannot calculate data pointer
                         mImpl->getAllocation()->unmap(mData, crop, nullptr);
                         memset(&mLayout, 0, sizeof(mLayout));
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 5d50fc3..d28f926 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -28,16 +28,27 @@
 
 #include <utility>
 
-#define MAX_FENCE_FDS 1
+// support up to 32 sync fds (and an optional merged fd), and 1 int
+#define MAX_FENCE_FDS  33
+#define MAX_FENCE_INTS 1
 
 class C2Fence::Impl {
 public:
-    enum type_t : uint32_t {
-        INVALID_FENCE,
-        NULL_FENCE,
-        SURFACE_FENCE,
-        SYNC_FENCE,
-        PIPE_FENCE,
+    // These enums are not part of the ABI, so can be changed.
+    enum type_t : int32_t {
+        INVALID_FENCE     = -1,
+        NULL_FENCE        = 0,
+        SURFACE_FENCE     = 2,
+
+        SYNC_FENCE        = 3,
+        PIPE_FENCE        = 4,
+    };
+
+    // magic numbers for native handles
+    enum : int32_t {
+        SYNC_FENCE_DEPRECATED_MAGIC     = 3,
+        SYNC_FENCE_UNORDERED_MAGIC      = '\302fsu',
+        SYNC_FENCE_MAGIC                = '\302fso',
     };
 
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
@@ -54,7 +65,8 @@
 
     /**
      * Create a native handle for the fence so it can be marshalled.
-     * The native handle must store fence type in the first integer.
+     * All native handles must store fence type in the last integer.
+     * The created native handle (if not null) must be closed by the caller.
      *
      * \return a valid native handle if the fence can be marshalled, otherwise return null.
      */
@@ -64,11 +76,29 @@
 
     Impl() = default;
 
+    /**
+     * Get the type of the fence from the native handle.
+     *
+     * \param nh the native handle to get the type from.
+     * \return the type of the fence, or INVALID_FENCE if the native handle is
+     * invalid or malformed.
+     */
     static type_t GetTypeFromNativeHandle(const native_handle_t* nh) {
-        if (nh && nh->numFds >= 0 && nh->numFds <= MAX_FENCE_FDS && nh->numInts > 0) {
-            return static_cast<type_t>(nh->data[nh->numFds]);
+        if (!nh || nh->numFds < 0 || nh->numFds > MAX_FENCE_FDS
+                || nh->numInts < 1 || nh->numInts > MAX_FENCE_INTS) {
+            return INVALID_FENCE;
         }
-        return INVALID_FENCE;
+
+        // the magic number for Codec 2.0 native handles is the last integer
+        switch (nh->data[nh->numFds + nh->numInts - 1]) {
+            case SYNC_FENCE_MAGIC:
+            case SYNC_FENCE_UNORDERED_MAGIC:
+            case SYNC_FENCE_DEPRECATED_MAGIC:
+                return SYNC_FENCE;
+
+            default:
+                return INVALID_FENCE;
+        }
     }
 };
 
@@ -189,6 +219,53 @@
 
 using namespace android;
 
+/**
+ * Implementation for a sync fence.
+ *
+ * A sync fence is fundamentally a fence that is created from an android sync
+ * fd (which represents a HW fence).
+ *
+ * The native handle layout for a single sync fence is:
+ *   fd[0]  - sync fd
+ *   int[0] - magic (SYNC_FENCE_MAGIC (='\302fso'))
+ *
+ * Note: Between Android T and 24Q3, the magic number was erroneously
+ * SYNC_FENCE (=3).
+ *
+ * Multi(ple) Sync Fences
+ *
+ * Since Android 24Q3, this implementation also supports a sequence of
+ * sync fences. When this is the case, there is an expectation that the last
+ * sync fence being ready will guarantee that all other sync fences are
+ * also ready. (This guarantees backward compatibility to a single fd sync fence,
+ * and mFence will be that final fence.)
+ *
+ * It is furthermore recommended that the fences be in order - either by
+ * expected signaling time, or by the order in which they need to be ready. The
+ * specific ordering is not specified or enforced, but it could be an
+ * implementation requirement of the specific use case in the future.
+ *
+ * This implementation also supports an unordered set of sync fences. In this
+ * case, it will merge all the fences into a single merged fence, which will
+ * be the backward compatible singular fence (stored in mFence).
+ *
+ * The native handle layout for an unordered multi-fence sync fence (from Android
+ * 24Q3) is:
+ *
+ *   fd[0]   - sync fd 1
+ *   ...
+ *   fd[n-1] - sync fd N
+ *   fd[n]   - merged fence fd
+ *   int[0]  - magic (SYNC_FENCE_UNORDERED_MAGIC (='\302fsu'))
+ *
+ * The native handle layout for an ordered multi-fence sync fence (from Android
+ * 24Q3) is:
+ *
+ *   fd[0]   - sync fd 1
+ *   ...
+ *   fd[n-1] - sync fd N
+ *   int[0]  - magic (SYNC_FENCE_MAGIC (='\302fso'))
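+ *
+ * For illustration only, a handle following the single-fd layout above could
+ * be built as below (mirroring the backward-compat tests; `syncFd` is an
+ * assumed caller-owned sync fd):
+ *
+ *   native_handle_t *nh = native_handle_create(1 /* numFds */, 1 /* numInts */);
+ *   nh->data[0] = syncFd;            // fd[0]  - sync fd
+ *   nh->data[1] = SYNC_FENCE_MAGIC;  // int[0] - magic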
+ */
 class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
 public:
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
@@ -218,11 +295,19 @@
         return mFence->dup();
     }
 
+    /**
+     * Returns a duped list of fds used when creating this fence. It will
+     * not return the internally created merged fence fd.
+     */
     std::vector<int> fds() const {
         std::vector<int> retFds;
         for (int index = 0; index < mListFences.size(); index++) {
             retFds.push_back(mListFences[index]->dup());
         }
+        // ensure that at least one fd is returned
+        if (mListFences.empty()) {
+            retFds.push_back(mFence->dup());
+        }
         return retFds;
     }
 
@@ -236,7 +321,18 @@
 
     virtual native_handle_t *createNativeHandle() const {
         std::vector<int> nativeFds = fds();
-        nativeFds.push_back(fd());
+        int32_t magic = SYNC_FENCE_MAGIC;
+
+        // Also parcel the singular fence if it is not already part of the list.
+        // If this was a single-fd fence, mListFences will be empty, but fds()
+        // already returned a list with that single fd.
+        if (!mListFences.empty() && mListFences.back() != mFence) {
+            nativeFds.push_back(fd());
+            if (!mListFences.empty()) {
+                magic = SYNC_FENCE_UNORDERED_MAGIC;
+            }
+        }
+
         native_handle_t* nh = native_handle_create(nativeFds.size(), 1);
         if (!nh) {
             ALOGE("Failed to allocate native handle for sync fence");
@@ -249,71 +345,122 @@
         for (int i = 0; i < nativeFds.size(); i++) {
             nh->data[i] = nativeFds[i];
         }
-        nh->data[nativeFds.size()] = type();
+        nh->data[nativeFds.size()] = magic;
         return nh;
     }
 
     virtual ~SyncFenceImpl() {};
 
+    /**
+     * Constructs a SyncFenceImpl from a single sync fd. No error checking is
+     * performed on the fd here as we cannot make this a null fence.
+     *
+     * \param fenceFd the fence fd to create the SyncFenceImpl from.
+     */
     SyncFenceImpl(int fenceFd) :
         mFence(sp<Fence>::make(fenceFd)) {
-        mListFences.clear();
-        if (mFence) {
-            mListFences.push_back(mFence);
-        }
     }
 
-    SyncFenceImpl(const std::vector<int>& fenceFds, int mergedFd) {
-        mListFences.clear();
-
-        for (int fenceFd : fenceFds) {
-            if (fenceFd < 0) {
-                continue;
-            } else {
-                mListFences.push_back(sp<Fence>::make(fenceFd));
-                if (!mListFences.back()) {
-                    mFence.clear();
-                    break;
-                }
-                if (mergedFd == -1) {
-                    mFence = (mFence == nullptr) ? (mListFences.back()) :
-                        (Fence::merge("syncFence", mFence, mListFences.back()));
-                }
-            }
-        }
-        if (mergedFd != -1)
-        {
-            mFence = sp<Fence>::make(mergedFd);
-        }
-        if (!mFence) {
-            mListFences.clear();
-        }
+    SyncFenceImpl(const sp<Fence> &fence) :
+        mFence(fence) {
     }
 
-    static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
-        if (!nh || nh->numFds < 1 || nh->numInts < 1) {
-            ALOGE("Invalid handle for sync fence");
+    /**
+     * Constructs a SyncFenceImpl from a list of sync fds.
+     *
+     * \param fenceFds the list of fence fds to create the SyncFenceImpl from.
+     * \param finalFence the singular fence for this multi-fd fence. This can
+     * be either the last fence in fences or a separate (merged) fence.
+     */
+    SyncFenceImpl(const std::vector<sp<Fence>>& fences, const sp<Fence> &finalFence) :
+        mListFences(fences),
+        mFence(finalFence) {
+    }
+
+    /**
+     * Creates a SyncFenceImpl from a native handle.
+     *
+     * \param nh the native handle to create the SyncFenceImpl from.
+     * \param takeOwnership if true, the SyncFenceImpl will take ownership of the
+     *                      file descriptors in the native handle. Otherwise,
+     *                      the SyncFenceImpl will dup the file descriptors.
+     *
+     * \return a shared_ptr to the SyncFenceImpl, or nullptr if the native
+     * handle is invalid or malformed.
+    */
+    static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(
+            const native_handle_t* nh, bool takeOwnership) {
+        // we should only call this method if _C2FenceFactory::GetTypeFromNativeHandle
+        // returned SYNC_FENCE, but do these checks anyway to avoid overflows
+        // in case that does not happen.
+        if (!nh) {
+            ALOGE("Invalid handle for a sync fence (nullptr)");
+            return nullptr;
+        } else if (nh->numFds < 1 || nh->numInts < 1
+                || nh->numFds > MAX_FENCE_FDS || nh->numInts > MAX_FENCE_INTS) {
+            ALOGE("Invalid handle for a sync fence (%d fds, %d ints)", nh->numFds, nh->numInts);
             return nullptr;
         }
-        std::vector<int> fds;
-        for (int i = 0; i < nh->numFds-1; i++) {
-            fds.push_back(dup(nh->data[i]));
-        }
-        std::shared_ptr<SyncFenceImpl> p = (nh->numFds == 1)?
-                (std::make_shared<SyncFenceImpl>(fds.back())):
-                (std::make_shared<SyncFenceImpl>(fds, (dup(nh->data[nh->numFds-1]))));
-        if (!p) {
-            ALOGE("Failed to allocate sync fence impl");
-            for (int fd : fds) {
-                close(fd);
+        std::vector<sp<Fence>> fences;
+        for (int i = 0; i < nh->numFds; i++) {
+            int fd = nh->data[i];
+            if (!takeOwnership && fd >= 0) {
+                fd = dup(fd);
+            }
+            if (fd >= 0) {
+                sp<Fence> fence = sp<Fence>::make(fd);
+                if (fence) {
+                    fences.push_back(fence);
+                } else {
+                    ALOGW("Failed to create fence from fd %d", fd);
+                }
             }
         }
+
+        std::shared_ptr<SyncFenceImpl> p;
+        if (fences.size() == 0) {
+            ALOGE("No valid fences found in handle for a sync fence");
+            return nullptr;
+        } else if (fences.size() == 1) {
+            p = std::make_shared<SyncFenceImpl>(fences[0]);
+        } else {
+            int32_t magic = nh->data[nh->numFds + nh->numInts - 1];
+            if (magic != SYNC_FENCE_MAGIC) {
+                // The last fence is the merged fence. Separate it.
+                sp<Fence> finalFence = fences.back();
+                fences.pop_back();
+
+                // Special case: if we end up with only a single element list
+                // with another merged fence, that merged fence must be the
+                // same fence. This happened in an early version of multi fd
+                // support for single-fd sync fences.
+                if (fences.size() == 1) {
+                    // For single-fd fence the sp-s must be equal
+                    finalFence = fences.back();
+                }
+                p = std::make_shared<SyncFenceImpl>(fences, finalFence);
+            } else {
+                // Use the last fence as the standalone fence.
+                p = std::make_shared<SyncFenceImpl>(fences, fences.back());
+            }
+        }
+
+        ALOGE_IF(!p, "Failed to allocate sync fence impl");
         return p;
     }
 
 private:
+    /**
+     * The list of fences in case of a multi-fence sync fence. Otherwise, this
+     * list is empty.
+     */
     std::vector<sp<Fence>> mListFences;
-    sp<Fence> mFence;  //merged fence in case mListFences size > 0
+
+    /**
+     * The singular fence for this sync fence. For multi-fence sync fences,
+     * this could be a merged fence, or simply the final fence.
+     */
+    sp<Fence> mFence;
 };
 
 std::vector<int> ExtractFdsFromCodec2SyncFence(const C2Fence& fence) {
@@ -324,39 +471,155 @@
     return retFds;
 }
 
-C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
+C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd, bool validate) {
     std::shared_ptr<C2Fence::Impl> p;
     if (fenceFd >= 0) {
         p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFd);
         if (!p) {
             ALOGE("Failed to allocate sync fence impl");
             close(fenceFd);
-        } else if (!p->valid()) {
+        } else if (validate && (!p->valid() || p->ready())) {
+            // don't create a fence object if the sync fd already signaled or is invalid
             p.reset();
         }
     } else {
-        ALOGV("Create sync fence from invalid fd");
-        return C2Fence();
+        ALOGV("Won't create sync fence from invalid fd");
     }
     return C2Fence(p);
 }
 
-C2Fence _C2FenceFactory::CreateMultipleFdSyncFence(const std::vector<int>& fenceFds) {
-    std::shared_ptr<C2Fence::Impl> p;
-    if (fenceFds.size() > 0) {
-        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFds, -1);
-        if (!p) {
-            ALOGE("Failed to allocate sync fence impl closing FDs");
-            for (int fenceFd : fenceFds) {
-                close(fenceFd);
-            }
-        } else if (!p->valid()) {
-            ALOGE("Invalid sync fence created");
-            p.reset();
-        }
-    } else {
-        ALOGE("Create sync fence from invalid fd list of size 0");
+C2Fence _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        const std::vector<int>& fenceFds, c2_status_t *status) {
+    if (status) {
+        *status = C2_OK;
     }
+
+    sp<Fence> finalFence;
+    std::vector<sp<Fence>> fences;
+
+    bool mergeFailed = false;
+    for (int fenceFd : fenceFds) {
+        if (fenceFd < 0) {
+            // ignore invalid fences
+            continue;
+        }
+        sp<Fence> fence = sp<Fence>::make(fenceFd);
+
+        // If we could not create an sp, further sp-s will also fail.
+        if (fence == nullptr) {
+            if (status) {
+                *status = C2_NO_MEMORY;
+            }
+            break;
+        }
+        fences.push_back(fence);
+
+        if (finalFence == nullptr) {
+            finalFence = fence;
+        } else {
+            sp<Fence> mergedFence = Fence::merge("syncFence", finalFence, fence);
+            if (mergedFence == nullptr || mergedFence == Fence::NO_FENCE) {
+                ALOGE_IF(!mergeFailed, "Could not merge fences for sync fence.");
+                mergeFailed = true;
+                if (status) {
+                    *status = (mergedFence == nullptr) ? C2_NO_MEMORY : C2_CORRUPTED;
+                }
+
+                if (mergedFence == nullptr) {
+                    break;
+                }
+                // If we cannot merge one of the fences, the best course of action
+                // is to keep going, as the alternative would be to clear all fences
+                // (making this a null fence), which would always be ready.
+            } else {
+                finalFence = mergedFence;
+            }
+        }
+    }
+
+    // we may have ended up with a single or no fence due to merging failures or
+    // invalid fds.
+    if (fences.size() == 0) {
+        // we have no fds, we have a null fence.
+        return C2Fence();
+    }
+
+    std::shared_ptr<C2Fence::Impl> p;
+
+    if (fences.size() == 1) {
+        // We have a single sync fd. We don't need the merged fence, which is
+        // already simply that sole fence.
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(finalFence);
+    } else {
+        // if we couldn't merge any fences just use the last one
+        if (finalFence == fences[0]) {
+            finalFence = fences.back();
+        }
+
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fences, finalFence);
+    }
+
+    if (!p) {
+        ALOGE("Failed to allocate sync fence impl closing FDs");
+        // all fds were moved into Fence objects which will close them.
+        if (status) {
+            *status = C2_NO_MEMORY;
+        }
+        return C2Fence();
+    }
+
+    return C2Fence(p);
+}
+
+C2Fence _C2FenceFactory::CreateMultiSyncFence(
+        const std::vector<int>& fenceFds, c2_status_t *status) {
+    if (status) {
+        *status = C2_OK;
+    }
+
+    std::vector<sp<Fence>> fences;
+
+    for (int fenceFd : fenceFds) {
+        if (fenceFd < 0) {
+            // ignore invalid fences
+            continue;
+        }
+        sp<Fence> fence = sp<Fence>::make(fenceFd);
+
+        // If we could not create an sp, keep going with the existing fences.
+        if (fence == nullptr) {
+            if (status) {
+                *status = C2_NO_MEMORY;
+            }
+            break;
+        }
+        fences.push_back(fence);
+    }
+
+    // we may have ended up with a single or no fence due to invalid fds.
+    if (fences.size() == 0) {
+        // we have no fds, we have a null fence.
+        return C2Fence();
+    }
+
+    std::shared_ptr<C2Fence::Impl> p;
+
+    if (fences.size() == 1) {
+        // We have a single sync fd, this is a simple sync fence.
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fences[0]);
+    } else {
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fences, fences.back());
+    }
+
+    if (!p) {
+        ALOGE("Failed to allocate sync fence impl closing FDs");
+        // all fds were moved into Fence objects which will close them.
+        if (status) {
+            *status = C2_NO_MEMORY;
+        }
+        return C2Fence();
+    }
+
     return C2Fence(p);
 }
 
@@ -521,7 +784,8 @@
     return fence.mImpl? fence.mImpl->createNativeHandle() : nullptr;
 }
 
-C2Fence _C2FenceFactory::CreateFromNativeHandle(const native_handle_t* handle) {
+C2Fence _C2FenceFactory::CreateFromNativeHandle(
+        const native_handle_t* handle, bool takeOwnership) {
     if (!handle) {
         return C2Fence();
     }
@@ -529,12 +793,14 @@
     std::shared_ptr<C2Fence::Impl> p;
     switch (type) {
         case C2Fence::Impl::SYNC_FENCE:
-            p = SyncFenceImpl::CreateFromNativeHandle(handle);
+            p = SyncFenceImpl::CreateFromNativeHandle(handle, takeOwnership);
             break;
         default:
             ALOGV("Unsupported fence type %d", type);
-            // If this is malformed-handle close the handle here.
-            (void) native_handle_close(handle);
+            // Still close the handle here if taking ownership.
+            if (takeOwnership) {
+                (void) native_handle_close(handle);
+            }
             // return a null-fence in this case
             break;
     }
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index e7fd14f..0987da2 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -478,13 +478,25 @@
 
 class _C2BlockPoolCache {
 public:
-    _C2BlockPoolCache() : mBlockPoolSeqId(C2BlockPool::PLATFORM_START + 1) {}
+    _C2BlockPoolCache() : mBlockPoolSeqId(C2BlockPool::PLATFORM_START + 1) {
+        mBqPoolDeferDeallocAfterStop = false;
+#ifdef __ANDROID_APEX__
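+        // Opt-in workaround for b/322731059: defer deallocation of BufferQueue
+        // block pool buffers after stop, but only when the HAL is not already
+        // stopped before the surface is torn down.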
+        bool stopHalBeforeSurface = ::android::base::GetBoolProperty(
+                "debug.codec2.stop_hal_before_surface", false);
+        if (!stopHalBeforeSurface) {
+            mBqPoolDeferDeallocAfterStop =
+                    ::android::base::GetIntProperty(
+                            "debug.codec2.bqpool_dealloc_after_stop", 0) != 0;
+        }
+#endif
+    }
 
 private:
     c2_status_t _createBlockPool(
             C2PlatformAllocatorDesc &allocatorParam,
             std::vector<std::shared_ptr<const C2Component>> components,
             C2BlockPool::local_id_t poolId,
+            bool deferDeallocAfterStop,
             std::shared_ptr<C2BlockPool> *pool) {
         std::shared_ptr<C2AllocatorStore> allocatorStore =
                 GetCodec2PlatformAllocatorStore();
@@ -548,6 +560,11 @@
                 if (res == C2_OK) {
                     std::shared_ptr<C2BlockPool> ptr(
                             new C2BufferQueueBlockPool(allocator, poolId), deleter);
+                    if (deferDeallocAfterStop) {
+                        std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                            std::static_pointer_cast<C2BufferQueueBlockPool>(ptr);
+                        bqPool->setDeferDeallocationAfterStop();
+                    }
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
                     mComponents[poolId].insert(
@@ -603,7 +620,8 @@
             std::vector<std::shared_ptr<const C2Component>> components,
             std::shared_ptr<C2BlockPool> *pool) {
         std::unique_lock lock(mMutex);
-        return _createBlockPool(allocator, components, mBlockPoolSeqId++, pool);
+        return _createBlockPool(allocator, components, mBlockPoolSeqId++,
+                                mBqPoolDeferDeallocAfterStop, pool);
     }
 
 
@@ -638,7 +656,7 @@
             C2PlatformAllocatorDesc allocator;
             allocator.allocatorId = C2PlatformAllocatorStore::BUFFERQUEUE;
             return _createBlockPool(
-                    allocator, {component}, blockPoolId, pool);
+                    allocator, {component}, blockPoolId, mBqPoolDeferDeallocAfterStop, pool);
         }
         return C2_NOT_FOUND;
     }
@@ -651,6 +669,8 @@
 
     std::map<C2BlockPool::local_id_t, std::weak_ptr<C2BlockPool>> mBlockPools;
     std::map<C2BlockPool::local_id_t, std::vector<std::weak_ptr<const C2Component>>> mComponents;
+
+    bool mBqPoolDeferDeallocAfterStop;
 };
 
 static std::unique_ptr<_C2BlockPoolCache> sBlockPoolCache =
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index 320b192..806932c 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -28,6 +28,94 @@
 class GraphicBuffer;
 }  // namespace android
 
+/**
+ * BufferQueue based BlockPool.
+ *
+ * This creates graphic blocks from BufferQueue. BufferQueue here is HIDL-ized IGBP.
+ * HIDL-ized IGBP enables vendor HAL to call IGBP interfaces via HIDL over process boundary.
+ * HIDL-ized IGBP is called HGBP. HGBP had been used in multiple places in Android,
+ * but this is now the only place where HGBP is still used.
+ *
+ * Initially there is no HGBP configured, in which case graphic blocks are allocated
+ * directly from gralloc upon \fetchGraphicBlock() requests.
+ *
+ * HGBP can also be configured as null, in which case graphic blocks are likewise
+ * allocated directly from gralloc upon \fetchGraphicBlock() requests.
+ *
+ * If a specific HGBP is configured, the HGBP acts as an allocator for creating graphic blocks.
+ *
+ *
+ * HGBP/IGBP and the BlockPool
+ *
+ * GraphicBuffer(s) from BufferQueue(IGBP/IGBC) are based on slot id.
+ * A created GraphicBuffer occupies a slot (so the GraphicBuffer has a slot-id).
+ * A GraphicBuffer is produced, consumed and recycled based on its slot-id
+ * w.r.t. the BufferQueue.
+ *
+ * HGBP::dequeueBuffer() returns a slot id where the slot has an available GraphicBuffer.
+ * If necessary, HGBP allocates a new GraphicBuffer for the slot and indicates
+ * via a return flag that a new buffer has been allocated.
+ * To retrieve the GraphicBuffer, HGBP::requestBuffer() along with the slot id
+ * is required. In order to save HGBP remote calls, the blockpool caches the
+ * allocated GraphicBuffer(s) along with the slot information.
+ *
+ * The blockpool provides C2GraphicBlock upon \fetchGraphicBlock().
+ * The C2GraphicBlock has a native handle, which is extracted from a GraphicBuffer
+ * and then cloned for independent life-cycle with the GraphicBuffer. The GraphicBuffer
+ * is allocated by HGBP::dequeueBuffer() and retrieved by HGBP::requestBuffer()
+ * if there is a HGBP configured.
+ *
+ *
+ * Life-cycle of C2GraphicBlock
+ *
+ * The decoder HAL writes a decoded frame into C2GraphicBlock. Upon
+ * completion, the component sends the block to the client in the remote process
+ * (i.e. to MediaCodec). The remote process renders the frame into the output surface
+ * via IGBP::queueBuffer() (Note: this is not hidlized.).
+ *
+ * If the decoder HAL destroys the C2GraphicBlock without transferring it to the
+ * client, the destroy request goes to the BlockPool. The BlockPool then
+ * frees the associated GraphicBuffer from its slot back to the HGBP for
+ * recycling via HGBP::cancelBuffer().
+ *
+ *
+ * Clearing the Cache(GraphicBuffer)
+ *
+ * When the output surface is switched to a new surface, the GraphicBuffers from
+ * the old surface are either migrated or cleared.
+ *
+ * The GraphicBuffer(s) still in use are migrated to a new surface during
+ * configuration via HGBP::attachBuffer(). The GraphicBuffer(s) not in use are
+ * cleared from the cache inside the BlockPool.
+ *
+ * When the surface is switched to a null surface, all the
+ * GraphicBuffers in the cache are cleared.
+ *
+ *
+ * Workaround w.r.t. b/322731059 (Deferring cleaning the cache)
+ *
+ * Some vendor devices have issues with graphic buffer lifecycle management,
+ * where the graphic buffers get released even when the cloned native handles
+ * in the remote process are not closed yet. This issue led to rare crashes
+ * for those devices when the cache is cleared early.
+ *
+ * We worked around the crash by deferring the clearing of the cache.
+ * The workaround is not enabled by default, and can be enabled via a
+ * system property as shown below:
+ *
+ *        'debug.codec2.bqpool_dealloc_after_stop' = 1
+ *
+ * Setting the debug flag will cause \::setDeferDeallocationAfterStop() to be
+ * called after the blockpool is created, which enables the deferral.
+ *
+ * After enabling the deferral, clearing the GraphicBuffer(s) is delayed until
+ *  1) \::clearDeferredBlocks() is called,
+ *        typically after the HAL processes a stop() request, or
+ *  2) a new \::fetchGraphicBlock() is called.
+ *
+ *  Since the deferral delays deallocation, it results in more memory
+ *  consumption during that brief period.
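+ *
+ *  For illustration only, a sketch of the opt-in flow (mirroring what the
+ *  block pool cache does when the property is set; `pool` is an assumed
+ *  std::shared_ptr<C2BlockPool> backed by this class):
+ *
+ *    std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+ *            std::static_pointer_cast<C2BufferQueueBlockPool>(pool);
+ *    bqPool->setDeferDeallocationAfterStop();  // enable the deferral
+ *    ...
+ *    bqPool->clearDeferredBlocks();            // e.g. after the HAL handles stop()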
+ */
 class C2BufferQueueBlockPool : public C2BlockPool {
 public:
     C2BufferQueueBlockPool(const std::shared_ptr<C2Allocator> &allocator, const local_id_t localId);
@@ -77,6 +165,8 @@
      * is configured as nullptr, unique id which is bundled in native_handle is zero.
      *
      * \param producer      the IGBP, which will be used to fetch blocks
+     *                      This can be null, in which case this blockpool will
+     *                      allocate the backing GraphicBuffer via the allocator (gralloc).
      */
     virtual void configureProducer(const android::sp<HGraphicBufferProducer> &producer);
 
@@ -89,6 +179,8 @@
      * is configured as nullptr, unique id which is bundled in native_handle is zero.
      *
      * \param producer      the IGBP, which will be used to fetch blocks
+     *                      This can be null, in which case this blockpool will
+     *                      allocate the backing GraphicBuffer via the allocator (gralloc).
      * \param syncMemory    Shared memory for synchronization of allocation & deallocation.
      * \param bqId          Id of IGBP
      * \param generationId  Generation Id for rendering output
@@ -110,6 +202,26 @@
      */
     virtual void invalidate();
 
+    /**
+     * Defer deallocation of cached blocks.
+     *
+     * Deallocation of cached blocks will be deferred until
+     * \clearDeferredBlocks() is called or a new block allocation is
+     * requested via \fetchGraphicBlock().
+     */
+    void setDeferDeallocationAfterStop();
+
+
+    /**
+     * Clear deferred blocks.
+     *
+     * Deallocation of cached blocks can be deferred by
+     * \setDeferDeallocationAfterStop().
+     * This method clears (deallocates) those deferred cached blocks explicitly.
+     * Use this interface if the blockpool could remain inactive indefinitely.
+     */
+    void clearDeferredBlocks();
+
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index 4f974ca..cabd5d9 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -23,13 +23,19 @@
 #include <android-base/unique_fd.h>
 
 /*
- * Create a list of fds from fence
+ * Extract a list of sync fence fds from a potentially multi-sync C2Fence.
+ * This will return dupped file descriptors of the fences used to create the
+ * sync fence. Specifically, for an unordered multi-sync fence, the merged
+ * singular fence will not be returned even though it is created as part of
+ * constructing the C2Fence object. On the other hand, for a single fd sync
+ * fence, the returned list will contain the sole file descriptor.
  *
  * \param fence   C2Fence object from which associated
  *                file descriptors need to be extracted
- * \return a vector of fds otherwise return vector of size 0
+ * \return a vector of sync fence fds. This will be a vector of size 0 if C2Fence
+ *         is not a sync fence. The caller is responsible for closing the
+ *         fds in the returned vector.
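+ *
+ * For illustration only (`fence` is an assumed sync C2Fence; the caller closes
+ * the returned fds):
+ *
+ *   std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+ *   for (int fd : fds) {
+ *       close(fd);
+ *   }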
  */
-
 std::vector<int> ExtractFdsFromCodec2SyncFence(const C2Fence& fence);
 
 class C2SurfaceSyncMemory;
@@ -54,20 +60,76 @@
             uint32_t waitId);
 
     /*
-     * Create C2Fence from a fence file fd.
+     * Create C2Fence from a sync fence fd.
      *
-     * \param fenceFd           Fence file descriptor.
+     * \param fenceFd           Sync fence file descriptor.
      *                          It will be owned and closed by the returned fence object.
+     * \param validate          If true, the fence fd will be validated to ensure
+     *                          it is a valid pending sync fence fd.
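+     *
+     * For illustration only (`fd` is an assumed sync fence fd; ownership
+     * passes to the returned fence):
+     *
+     *   C2Fence fence = _C2FenceFactory::CreateSyncFence(fd);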
      */
-    static C2Fence CreateSyncFence(int fenceFd);
+    static C2Fence CreateSyncFence(int fenceFd, bool validate = true);
 
     /*
-     * Create C2Fence from list of fence file fds.
+     * Create C2Fence from list of sync fence fds, while also merging them to
+     * create a singular fence, which can be used as a backward compatible sync
+     * fence.
      *
-     * \param fenceFds          Vector of file descriptor for fence.
-     *                          It will be owned and closed by the returned fence object.
+     * \param fenceFds   Vector of sync fence file descriptors.
+     *                   All file descriptors will be owned (and closed) by
+     *                   the returned fence object.
      */
-    static C2Fence CreateMultipleFdSyncFence(const std::vector<int>& fenceFds);
+    [[deprecated("Use CreateUnorderedMultiSyncFence instead.")]]
+    static C2Fence CreateMultipleFdSyncFence(const std::vector<int>& fenceFds) {
+        return CreateUnorderedMultiSyncFence(fenceFds);
+    }
+
+    /*
+     * Create C2Fence from a list of unordered sync fence fds, while also merging
+     * them to create a singular fence, which can be used as a backward compatible
+     * sync fence.
+     *
+     * \param fenceFds   Vector of sync fence file descriptors.
+     *                   All file descriptors will be owned (and closed) by
+     *                   the returned fence object.
+     * \param status     Optional pointer to a status field. If not null, it will be
+     *                   updated with the status of the operation. Possible values
+     *                   are:
+     *                   - C2_OK: The operation succeeded.
+     *                   - C2_NO_MEMORY: The operation failed because of lack of
+     *                     memory.
+     *                   - C2_CORRUPTED: The operation failed because the sync
+     *                     fence fds could not be merged.
+     * \return           A C2Fence object representing the sync fence fds, or
+     *                   an empty C2Fence if no C2Fence could be created.
+     *                   It is possible for the operation to fail but still return
+     *                   a possibly viable C2Fence object, e.g. if the merge
+     *                   operation failed only partially. Similarly, it is possible
+     *                   for the operation to succeed but still return an empty
+     *                   C2Fence object, e.g. if all fence fds were invalid.
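+     *
+     * For illustration only (fd1 and fd2 are assumed sync fence fds whose
+     * ownership passes to the returned fence):
+     *
+     *   c2_status_t res = C2_OK;
+     *   C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence({fd1, fd2}, &res);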
+     */
+    static C2Fence CreateUnorderedMultiSyncFence(
+            const std::vector<int>& fenceFds, c2_status_t *status = nullptr /* nullable */);
+
+    /*
+     * Create C2Fence from a list of sync fence fds. Waiting for the last fence
+     * must guarantee that all other fences are also signaled.
+     *
+     * \param fenceFds   Vector of sync fence file descriptors.
+     *                   All file descriptors will be owned (and closed) by
+     *                   the returned fence object.
+     * \param status     Optional pointer to a status field. If not null, it will be
+     *                   updated with the status of the operation. Possible values
+     *                   are:
+     *                   - C2_OK: The operation succeeded.
+     *                   - C2_NO_MEMORY: The operation failed because of lack of
+     *                     memory.
+     * \return           A C2Fence object representing the sync fence fds, or
+     *                   an empty C2Fence if the operation failed.  It is possible
+     *                   for the operation to succeed but still return an empty
+     *                   C2Fence object, e.g. if all fence fds were invalid.
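+     *
+     * For illustration only (fd1..fd3 are assumed sync fence fds, ordered so
+     * that fd3 signals last; ownership passes to the returned fence):
+     *
+     *   C2Fence fence = _C2FenceFactory::CreateMultiSyncFence({fd1, fd2, fd3});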
+     */
+    static C2Fence CreateMultiSyncFence(
+            const std::vector<int>& fenceFds, c2_status_t *status = nullptr /* nullable */);
 
     /*
      * Create C2Fence from an fd created by pipe()/pipe2() syscall.
@@ -97,13 +159,18 @@
 
     /*
      * Create C2Fence from a native handle.
-
+     *
      * \param handle           A native handle representing a fence
-     *                         The fd in the native handle will be duplicated, so the caller will
-     *                         still own the handle and have to close it.
+     * \param takeOwnership    If true, the native handle and the file descriptors
+     *                         within will be owned by the returned fence object.
+     *                         If false (default), the caller will still own the
+     *                         handle and its file descriptors and will have to
+     *                         close it.
+     *                         In either case the caller is responsible for
+     *                         deleting the native handle.
      */
-    static C2Fence CreateFromNativeHandle(const native_handle_t* handle);
+    static C2Fence CreateFromNativeHandle(
+            const native_handle_t* handle, bool takeOwnership = false);
 };
 
-
 #endif // STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 48157c8..665f9fc 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -588,11 +588,22 @@
         return C2_BAD_VALUE;
     }
 
+    void clearDeferredBlocks_l() {
+        if (mHavingDeallocationDeferred) {
+            mHavingDeallocationDeferred = false;
+            for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
+                mBuffersWithDeallocationDeferred[i].clear();
+            }
+        }
+    }
+
 public:
     Impl(const std::shared_ptr<C2Allocator> &allocator)
         : mInit(C2_OK), mProducerId(0), mGeneration(0),
           mConsumerUsage(0), mDqFailure(0), mLastDqTs(0),
-          mLastDqLogTs(0), mAllocator(allocator), mIgbpValidityToken(std::make_shared<int>(0)) {
+          mLastDqLogTs(0), mAllocator(allocator),
+          mDeferDeallocationAfterStop(false),
+          mHavingDeallocationDeferred(false), mIgbpValidityToken(std::make_shared<int>(0)) {
     }
 
     ~Impl() {
@@ -634,6 +645,7 @@
             }
         }
         if (mProducerId == 0) {
+            clearDeferredBlocks_l();
             std::shared_ptr<C2GraphicAllocation> alloc;
             c2_status_t err = mAllocator->newGraphicAllocation(
                     width, height, format, usage, &alloc);
@@ -692,6 +704,7 @@
                            uint32_t generation,
                            uint64_t usage,
                            bool bqInformation) {
+        bool toNullSurface = false;
         std::shared_ptr<C2SurfaceSyncMemory> c2SyncMem;
         if (syncHandle) {
             if (!producer) {
@@ -714,6 +727,9 @@
                 mProducerId = producerId;
                 mGeneration = bqInformation ? generation : 0;
             } else {
+                if (mProducer) {
+                    toNullSurface = true;
+                }
                 mProducer = nullptr;
                 mProducerId = 0;
                 mGeneration = 0;
@@ -760,6 +776,17 @@
                 // old buffers should not be cancelled since the associated IGBP
                 // is no longer valid.
                 mIgbpValidityToken = std::make_shared<int>(0);
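+                // b/322731059 workaround: when switching to a null surface with
+                // deferral enabled, keep the cached GraphicBuffers alive until
+                // clearDeferredBlocks() or the next fetchGraphicBlock() call.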
+                if (mDeferDeallocationAfterStop) {
+                    if (toNullSurface) {
+                        mHavingDeallocationDeferred = true;
+                        for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
+                            mBuffersWithDeallocationDeferred[i] = mBuffers[i];
+                        }
+                    }
+                }
+            }
+            if (!toNullSurface) {
+                clearDeferredBlocks_l();
             }
             if (mInvalidated) {
                 mIgbpValidityToken = std::make_shared<int>(0);
@@ -811,6 +838,16 @@
         }
     }
 
+    void setDeferDeallocationAfterStop() {
+        std::scoped_lock<std::mutex> lock(mMutex);
+        mDeferDeallocationAfterStop = true;
+    }
+
+    void clearDeferredBlocks() {
+        std::scoped_lock<std::mutex> lock(mMutex);
+        clearDeferredBlocks_l();
+    }
+
 private:
     friend struct C2BufferQueueBlockPoolData;
 
@@ -833,6 +870,14 @@
     sp<GraphicBuffer> mBuffers[NUM_BUFFER_SLOTS];
     std::weak_ptr<C2BufferQueueBlockPoolData> mPoolDatas[NUM_BUFFER_SLOTS];
 
+    // In order to workaround b/322731059,
+    // deallocating buffers due to stop using the current surface
+    // could be deferred until the component calling stop or a
+    // new allocation being requested.
+    bool mDeferDeallocationAfterStop;
+    bool mHavingDeallocationDeferred;
+    sp<GraphicBuffer> mBuffersWithDeallocationDeferred[NUM_BUFFER_SLOTS];
+
     std::mutex mSyncMemMutex;
     std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
     std::shared_ptr<C2SurfaceSyncMemory> mOldMem;
@@ -1178,3 +1223,15 @@
     }
 }
 
+void C2BufferQueueBlockPool::setDeferDeallocationAfterStop() {
+    if (mImpl) {
+        mImpl->setDeferDeallocationAfterStop();
+    }
+}
+
+void C2BufferQueueBlockPool::clearDeferredBlocks() {
+    if (mImpl) {
+        mImpl->clearDeferredBlocks();
+    }
+}
+
diff --git a/media/libaaudio/Android.bp b/media/libaaudio/Android.bp
index 4b417a7..add28e0 100644
--- a/media/libaaudio/Android.bp
+++ b/media/libaaudio/Android.bp
@@ -36,9 +36,6 @@
     symbol_file: "src/libaaudio.map.txt",
     first_version: "26",
     unversioned_until: "current",
-    export_header_libs: [
-        "libAAudio_headers",
-    ],
 }
 
 cc_library_headers {
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 4affaed..6a35ced 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -323,7 +323,6 @@
     printf("      -C{channels}      number of input channels\n");
     printf("      -D{deviceId}      input device ID\n");
     printf("      -F{0,1,2}         input format, 1=I16, 2=FLOAT\n");
-    printf("      -g{gain}          recirculating loopback gain\n");
     printf("      -h{hangMillis}    occasionally hang in the callback\n");
     printf("      -P{inPerf}        set input AAUDIO_PERFORMANCE_MODE*\n");
     printf("          n for _NONE\n");
@@ -436,7 +435,6 @@
     int                   written                    = 0;
 
     int                   testMode                   = TEST_LATENCY;
-    double                gain                       = 1.0;
     int                   hangTimeMillis             = 0;
     std::string           report;
 
@@ -468,9 +466,6 @@
                     case 'F':
                         requestedInputFormat = atoi(&arg[2]);
                         break;
-                    case 'g':
-                        gain = atof(&arg[2]);
-                        break;
                     case 'h':
                         // Was there a number after the "-h"?
                         if (arg[2]) {
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index fc8ad77..a1551f8 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -46,6 +46,7 @@
     ],
     static_libs: [
         "aaudio-aidl-cpp",
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 9d9b574..e19d526 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -574,7 +574,7 @@
      * For privacy, the following usages can not be recorded: AAUDIO_VOICE_COMMUNICATION*,
      * AAUDIO_USAGE_NOTIFICATION*, AAUDIO_USAGE_ASSISTANCE* and {@link #AAUDIO_USAGE_ASSISTANT}.
      *
-     * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Build.VERSION_CODES</a>,
+     * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Q</a>,
      * this means only {@link #AAUDIO_USAGE_MEDIA} and {@link #AAUDIO_USAGE_GAME} may be captured.
      *
      * See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_ALL">
@@ -1115,6 +1115,17 @@
  *
  * The default, if you do not call this function, is {@link #AAUDIO_USAGE_MEDIA}.
  *
+ * If you set Usage then you will need to associate the volume keys with the resulting stream.
+ * Otherwise the volume keys may not work correctly.
+ * This is done in Java with the following code block.
+ *
+ * <pre><code>if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ *     AudioAttributes attributes = new AudioAttributes.Builder().setUsage(usage)
+ *             .setContentType(contentType).build();
+ *     setVolumeControlStream(attributes.getVolumeControlStream());
+ * }
+ * </code></pre>
+ *
  * Available since API level 28.
  *
  * @param builder reference provided by AAudio_createStreamBuilder()
@@ -1132,6 +1143,17 @@
  *
  * The default, if you do not call this function, is {@link #AAUDIO_CONTENT_TYPE_MUSIC}.
  *
+ * If you set the content type then you will also need to associate the volume keys with
+ * the resulting stream. Otherwise the volume keys may not work correctly.
+ * In Java this is done with the following code block:
+ *
+ * <pre><code>if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
+ *     AudioAttributes attributes = new AudioAttributes.Builder().setUsage(usage)
+ *             .setContentType(contentType).build();
+ *     setVolumeControlStream(attributes.getVolumeControlStream());
+ * }
+ * </code></pre>
+ *
  * Available since API level 28.
  *
  * @param builder reference provided by AAudio_createStreamBuilder()
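As a native-side companion to the Java snippet above, a minimal sketch of where the usage and content type are set on the AAudio builder; the NDK functions are the public API, while the surrounding helper is illustrative only.

// Sketch: open an output stream tagged with a usage and content type (API 28+).
#include <aaudio/AAudio.h>

aaudio_result_t openGameStream(AAudioStream** streamOut) {
    AAudioStreamBuilder* builder = nullptr;
    aaudio_result_t result = AAudio_createStreamBuilder(&builder);
    if (result != AAUDIO_OK) return result;
    AAudioStreamBuilder_setUsage(builder, AAUDIO_USAGE_GAME);
    AAudioStreamBuilder_setContentType(builder, AAUDIO_CONTENT_TYPE_MUSIC);
    result = AAudioStreamBuilder_openStream(builder, streamOut);
    AAudioStreamBuilder_delete(builder);
    // Volume-key association must still be done on the Java side as shown above.
    return result;
}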
diff --git a/media/libaaudio/src/binding/AAudioBinderClient.cpp b/media/libaaudio/src/binding/AAudioBinderClient.cpp
index 5f34a75..439d5af 100644
--- a/media/libaaudio/src/binding/AAudioBinderClient.cpp
+++ b/media/libaaudio/src/binding/AAudioBinderClient.cpp
@@ -71,18 +71,10 @@
     {
         Mutex::Autolock _l(mServiceLock);
         if (mAdapter == nullptr) {
-            sp<IBinder> binder;
             sp<IServiceManager> sm = defaultServiceManager();
-            // Try several times to get the service.
-            int retries = 4;
-            do {
-                binder = sm->getService(String16(AAUDIO_SERVICE_NAME)); // This will wait a while.
-                if (binder.get() != nullptr) {
-                    break;
-                }
-            } while (retries-- > 0);
+            sp<IBinder> binder = sm->waitForService(String16(AAUDIO_SERVICE_NAME));
 
-            if (binder.get() != nullptr) {
+            if (binder != nullptr) {
                 // Ask for notification if the service dies.
                 status_t status = binder->linkToDeath(mAAudioClient);
                 // TODO review what we should do if this fails
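The hunk above replaces a bounded getService() retry loop with waitForService(). A small sketch of that pattern, with a placeholder service name:

// Sketch: block in ServiceManager until the service is registered instead of
// polling getService() in a loop. "some.example.service" is a placeholder.
#include <binder/IServiceManager.h>
#include <utils/String16.h>

android::sp<android::IBinder> acquireService() {
    auto sm = android::defaultServiceManager();
    // waitForService() returns once the service is published, removing the
    // need for client-side retry/backoff logic.
    return sm->waitForService(android::String16("some.example.service"));
}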
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index b2e93f0..fa3f5a0 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -620,17 +620,19 @@
                                                  audio_port_handle_t *portHandle) {
     ALOGV("%s() called", __func__);
     if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
+        ALOGE("%s() getServiceHandle() is invalid", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
     aaudio_result_t result =  mServiceInterface.startClient(mServiceStreamHandleInfo,
                                                             client, attr, portHandle);
-    ALOGV("%s(%d) returning %d", __func__, *portHandle, result);
+    ALOGV("%s(), got %d, returning %d", __func__, *portHandle, result);
     return result;
 }
 
 aaudio_result_t AudioStreamInternal::stopClient(audio_port_handle_t portHandle) {
     ALOGV("%s(%d) called", __func__, portHandle);
     if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
+        ALOGE("%s(%d) getServiceHandle() is invalid", __func__, portHandle);
         return AAUDIO_ERROR_INVALID_STATE;
     }
     aaudio_result_t result = mServiceInterface.stopClient(mServiceStreamHandleInfo, portHandle);
diff --git a/media/libaaudio/src/core/AAudioAudio.cpp b/media/libaaudio/src/core/AAudioAudio.cpp
index 1e27a81..1e8ac8d 100644
--- a/media/libaaudio/src/core/AAudioAudio.cpp
+++ b/media/libaaudio/src/core/AAudioAudio.cpp
@@ -602,6 +602,7 @@
 }
 
 AAUDIO_API aaudio_result_t AAudio_setMMapPolicy(aaudio_policy_t policy) {
+    ALOGD("%s(%d)", __func__, policy);
     return AudioGlobal_setMMapPolicy(policy);
 }
 
diff --git a/media/libaaudio/src/core/AAudioStreamParameters.cpp b/media/libaaudio/src/core/AAudioStreamParameters.cpp
index 3e51575..67fc668 100644
--- a/media/libaaudio/src/core/AAudioStreamParameters.cpp
+++ b/media/libaaudio/src/core/AAudioStreamParameters.cpp
@@ -23,13 +23,6 @@
 
 using namespace aaudio;
 
-// TODO These defines should be moved to a central place in audio.
-#define SAMPLES_PER_FRAME_MIN        1
-#define SAMPLES_PER_FRAME_MAX        FCC_LIMIT
-#define SAMPLE_RATE_HZ_MIN           8000
-// HDMI supports up to 32 channels at 1536000 Hz.
-#define SAMPLE_RATE_HZ_MAX           1600000
-
 void AAudioStreamParameters::copyFrom(const AAudioStreamParameters &other) {
     mSamplesPerFrame      = other.mSamplesPerFrame;
     mSampleRate           = other.mSampleRate;
@@ -73,8 +66,8 @@
 }
 
 aaudio_result_t AAudioStreamParameters::validate() const {
-    if (mSamplesPerFrame != AAUDIO_UNSPECIFIED
-        && (mSamplesPerFrame < SAMPLES_PER_FRAME_MIN || mSamplesPerFrame > SAMPLES_PER_FRAME_MAX)) {
+    if (mSamplesPerFrame != AAUDIO_UNSPECIFIED && (mSamplesPerFrame < CHANNEL_COUNT_MIN_AAUDIO ||
+                                                   mSamplesPerFrame > CHANNEL_COUNT_MAX_AAUDIO)) {
         ALOGD("channelCount out of range = %d", mSamplesPerFrame);
         return AAUDIO_ERROR_OUT_OF_RANGE;
     }
@@ -105,8 +98,8 @@
     aaudio_result_t result = isFormatValid (mAudioFormat);
     if (result != AAUDIO_OK) return result;
 
-    if (mSampleRate != AAUDIO_UNSPECIFIED
-        && (mSampleRate < SAMPLE_RATE_HZ_MIN || mSampleRate > SAMPLE_RATE_HZ_MAX)) {
+    if (mSampleRate != AAUDIO_UNSPECIFIED &&
+        (mSampleRate < SAMPLE_RATE_HZ_MIN_AAUDIO || mSampleRate > SAMPLE_RATE_HZ_MAX_IEC610937)) {
         ALOGD("sampleRate out of range = %d", mSampleRate);
         return AAUDIO_ERROR_INVALID_RATE;
     }
diff --git a/media/libaaudio/src/core/AudioStreamBuilder.cpp b/media/libaaudio/src/core/AudioStreamBuilder.cpp
index ac4e2b3..01f0038 100644
--- a/media/libaaudio/src/core/AudioStreamBuilder.cpp
+++ b/media/libaaudio/src/core/AudioStreamBuilder.cpp
@@ -24,6 +24,7 @@
 
 #include <aaudio/AAudio.h>
 #include <aaudio/AAudioTesting.h>
+#include <android/media/audio/common/AudioMMapPolicy.h>
 #include <android/media/audio/common/AudioMMapPolicyInfo.h>
 #include <android/media/audio/common/AudioMMapPolicyType.h>
 #include <media/AudioSystem.h>
@@ -40,21 +41,15 @@
 
 using namespace aaudio;
 
+using android::media::audio::common::AudioMMapPolicy;
 using android::media::audio::common::AudioMMapPolicyInfo;
 using android::media::audio::common::AudioMMapPolicyType;
 
 #define AAUDIO_MMAP_POLICY_DEFAULT             AAUDIO_POLICY_NEVER
 #define AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT   AAUDIO_POLICY_NEVER
+#define AAUDIO_MMAP_POLICY_DEFAULT_AIDL        AudioMMapPolicy::NEVER
+#define AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT_AIDL AudioMMapPolicy::NEVER
 
-// These values are for a pre-check before we ask the lower level service to open a stream.
-// So they are just outside the maximum conceivable range of value,
-// on the edge of being ridiculous.
-// TODO These defines should be moved to a central place in audio.
-#define SAMPLES_PER_FRAME_MIN        1
-#define SAMPLES_PER_FRAME_MAX        FCC_LIMIT
-#define SAMPLE_RATE_HZ_MIN           8000
-// HDMI supports up to 32 channels at 1536000 Hz.
-#define SAMPLE_RATE_HZ_MAX           1600000
 #define FRAMES_PER_DATA_CALLBACK_MIN 1
 #define FRAMES_PER_DATA_CALLBACK_MAX (1024 * 1024)
 
@@ -114,9 +109,12 @@
 
     std::vector<AudioMMapPolicyInfo> policyInfos;
     aaudio_policy_t mmapPolicy = AudioGlobal_getMMapPolicy();
-    if (android::AudioSystem::getMmapPolicyInfo(
-            AudioMMapPolicyType::DEFAULT, &policyInfos) == NO_ERROR) {
-        aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(policyInfos);
+    ALOGD("%s, global mmap policy is %d", __func__, mmapPolicy);
+    if (status_t status = android::AudioSystem::getMmapPolicyInfo(
+            AudioMMapPolicyType::DEFAULT, &policyInfos); status == NO_ERROR) {
+        aaudio_policy_t systemMmapPolicy = AAudio_getAAudioPolicy(
+                policyInfos, AAUDIO_MMAP_POLICY_DEFAULT_AIDL);
+        ALOGD("%s, system mmap policy is %d", __func__, systemMmapPolicy);
         if (mmapPolicy == AAUDIO_POLICY_ALWAYS && systemMmapPolicy == AAUDIO_POLICY_NEVER) {
             // No need to try as AAudioService is not created and the client only wants MMAP path.
             return AAUDIO_ERROR_NO_SERVICE;
@@ -129,6 +127,7 @@
             mmapPolicy = systemMmapPolicy;
         }
     } else {
+        ALOGD("%s, failed to query system mmap policy, error=%d", __func__, status);
         // If it fails querying mmap policy info, it is highly possible that the AAudioService is
         // not created. In this case, we don't try mmap path.
         if (mmapPolicy == AAUDIO_POLICY_ALWAYS) {
@@ -140,16 +139,22 @@
     if (mmapPolicy == AAUDIO_UNSPECIFIED) {
         mmapPolicy = AAUDIO_MMAP_POLICY_DEFAULT;
     }
+    ALOGD("%s, final mmap policy is %d", __func__, mmapPolicy);
 
     policyInfos.clear();
     aaudio_policy_t mmapExclusivePolicy = AAUDIO_UNSPECIFIED;
-    if (android::AudioSystem::getMmapPolicyInfo(
-            AudioMMapPolicyType::EXCLUSIVE, &policyInfos) == NO_ERROR) {
-        mmapExclusivePolicy = AAudio_getAAudioPolicy(policyInfos);
+    if (status_t status = android::AudioSystem::getMmapPolicyInfo(
+            AudioMMapPolicyType::EXCLUSIVE, &policyInfos); status == NO_ERROR) {
+        mmapExclusivePolicy = AAudio_getAAudioPolicy(
+                policyInfos, AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT_AIDL);
+        ALOGD("%s, system mmap exclusive policy is %d", __func__, mmapExclusivePolicy);
+    } else {
+        ALOGD("%s, failed to query mmap exclusive policy, error=%d", __func__, status);
     }
     if (mmapExclusivePolicy == AAUDIO_UNSPECIFIED) {
         mmapExclusivePolicy = AAUDIO_MMAP_EXCLUSIVE_POLICY_DEFAULT;
     }
+    ALOGD("%s, final mmap exclusive policy is %d", __func__, mmapExclusivePolicy);
 
     aaudio_sharing_mode_t sharingMode = getSharingMode();
     if ((sharingMode == AAUDIO_SHARING_MODE_EXCLUSIVE)
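A hedged, test-only sketch of how a client can bias this policy resolution from its side before opening a stream; AAudio_setMMapPolicy() is declared in <aaudio/AAudioTesting.h>, and the helper name is illustrative.

// Sketch: request the MMAP data path but allow fallback to the legacy path.
#include <aaudio/AAudio.h>
#include <aaudio/AAudioTesting.h>

aaudio_result_t preferMMapForTest() {
    // AAUDIO_POLICY_AUTO lets the final policy still be resolved against the
    // system policy as in AudioStreamBuilder::build() above.
    return AAudio_setMMapPolicy(AAUDIO_POLICY_AUTO);
}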
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
index a15fcb8..890057d 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.cpp
@@ -28,7 +28,8 @@
 
 void SampleRateConverter::reset() {
     FlowGraphNode::reset();
-    mInputCursor = kInitialCallCount;
+    mInputCallCount = kInitialCallCount;
+    mInputCursor = 0;
 }
 
 // Return true if there is a sample available.
diff --git a/media/libaaudio/src/flowgraph/SampleRateConverter.h b/media/libaaudio/src/flowgraph/SampleRateConverter.h
index f883e6c..a4318f0 100644
--- a/media/libaaudio/src/flowgraph/SampleRateConverter.h
+++ b/media/libaaudio/src/flowgraph/SampleRateConverter.h
@@ -54,7 +54,7 @@
     int32_t mNumValidInputFrames = 0; // number of valid frames currently in the input port buffer
     // We need our own callCount for upstream calls because calls occur at a different rate.
     // This means we cannot have cyclic graphs or merges that contain an SRC.
-    int64_t mInputCallCount = 0;
+    int64_t mInputCallCount = kInitialCallCount;
 
 };
 
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index 8595308..255bd0f 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -261,6 +261,11 @@
 
 void AudioStreamLegacy::onAudioDeviceUpdate(audio_io_handle_t /* audioIo */,
             audio_port_handle_t deviceId) {
+    // Ignore an invalid deviceId; a routing update to AAUDIO_UNSPECIFIED is not expected here.
+    if (deviceId == AAUDIO_UNSPECIFIED) {
+        ALOGE("%s() called with deviceId == AAUDIO_UNSPECIFIED, ignoring", __func__);
+        return;
+    }
     // Device routing is a common source of errors and DISCONNECTS.
     // Please leave this log in place. If there is a bug then this might
     // get called after the stream has been deleted so log before we
diff --git a/media/libaaudio/src/utility/AAudioUtilities.cpp b/media/libaaudio/src/utility/AAudioUtilities.cpp
index 0cbf79d..3df23ee 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.cpp
+++ b/media/libaaudio/src/utility/AAudioUtilities.cpp
@@ -680,12 +680,16 @@
 
 } // namespace
 
-aaudio_policy_t AAudio_getAAudioPolicy(const std::vector<AudioMMapPolicyInfo>& policyInfos) {
-    if (policyInfos.empty()) return AAUDIO_POLICY_AUTO;
-    for (size_t i = 1; i < policyInfos.size(); ++i) {
-        if (policyInfos.at(i).mmapPolicy != policyInfos.at(0).mmapPolicy) {
+aaudio_policy_t AAudio_getAAudioPolicy(const std::vector<AudioMMapPolicyInfo>& policyInfos,
+                                       AudioMMapPolicy defaultPolicy) {
+    AudioMMapPolicy policy = defaultPolicy;
+    for (const auto& policyInfo : policyInfos) {
+        if (policyInfo.mmapPolicy == AudioMMapPolicy::NEVER) {
+            policy = policyInfo.mmapPolicy;
+        } else if (policyInfo.mmapPolicy == AudioMMapPolicy::AUTO ||
+                   policyInfo.mmapPolicy == AudioMMapPolicy::ALWAYS) {
             return AAUDIO_POLICY_AUTO;
         }
     }
-    return aidl2legacy_aaudio_policy(policyInfos.at(0).mmapPolicy);
+    return aidl2legacy_aaudio_policy(policy);
 }
diff --git a/media/libaaudio/src/utility/AAudioUtilities.h b/media/libaaudio/src/utility/AAudioUtilities.h
index d44bbab..7c351e1 100644
--- a/media/libaaudio/src/utility/AAudioUtilities.h
+++ b/media/libaaudio/src/utility/AAudioUtilities.h
@@ -348,9 +348,19 @@
     AAUDIO_CHANNEL_INDEX_MASK_24 = AAUDIO_CHANNEL_BIT_INDEX | (1 << 24) - 1,
 };
 
-// The aaudio policy will be ALWAYS, NEVER, UNSPECIFIED only when all policy info are
-// ALWAYS, NEVER or UNSPECIFIED. Otherwise, the aaudio policy will be AUTO.
+/**
+ * Returns the aaudio mmap policy derived from the given mmap policy info. The rules are:
+ * 1. Return AUTO if any policy is AUTO or ALWAYS.
+ * 2. Return NEVER if at least one policy is NEVER and none is AUTO or ALWAYS.
+ * 3. Return the default policy if all policies are UNSPECIFIED (or the list is empty).
+ *
+ * @param policyInfos mmap policy info reported by the audio HAL modules
+ * @param defaultPolicy policy to use when no module reports a specific policy
+ * @return the resolved aaudio_policy_t
+ */
 aaudio_policy_t AAudio_getAAudioPolicy(
-        const std::vector<android::media::audio::common::AudioMMapPolicyInfo>& policyInfos);
+        const std::vector<android::media::audio::common::AudioMMapPolicyInfo>& policyInfos,
+        android::media::audio::common::AudioMMapPolicy defaultPolicy =
+                android::media::audio::common::AudioMMapPolicy::NEVER);
 
 #endif //UTILITY_AAUDIO_UTILITIES_H
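A worked, unit-test style sketch of the resolution rules documented above; the include path of the internal header is an assumption.

// Sketch: expected results of AAudio_getAAudioPolicy() for a few inputs.
#include <vector>
#include <android/media/audio/common/AudioMMapPolicy.h>
#include <android/media/audio/common/AudioMMapPolicyInfo.h>
#include "utility/AAudioUtilities.h"  // assumed internal include path

using android::media::audio::common::AudioMMapPolicy;
using android::media::audio::common::AudioMMapPolicyInfo;

void policyResolutionExamples() {
    std::vector<AudioMMapPolicyInfo> infos;

    // Rule 3: nothing reported -> the caller-supplied default wins.
    (void) AAudio_getAAudioPolicy(infos, AudioMMapPolicy::NEVER);  // AAUDIO_POLICY_NEVER

    // Rule 2: only NEVER/UNSPECIFIED reported -> NEVER.
    infos.emplace_back();                     // mmapPolicy defaults to UNSPECIFIED
    infos.emplace_back();
    infos.back().mmapPolicy = AudioMMapPolicy::NEVER;
    (void) AAudio_getAAudioPolicy(infos);                          // AAUDIO_POLICY_NEVER

    // Rule 1: any AUTO or ALWAYS -> AUTO, regardless of the rest.
    infos.emplace_back();
    infos.back().mmapPolicy = AudioMMapPolicy::ALWAYS;
    (void) AAudio_getAAudioPolicy(infos);                          // AAUDIO_POLICY_AUTO
}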
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 04a8a45..61204ae 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -158,6 +158,7 @@
         "framework-permission-aidl-cpp",
         "libbinder",
         "libmediametrics",
+        "libmediautils",
         "spatializer-aidl-cpp",
     ],
 
@@ -315,6 +316,7 @@
         "aidl/android/media/DeviceConnectedState.aidl",
         "aidl/android/media/EffectDescriptor.aidl",
         "aidl/android/media/SurroundSoundConfig.aidl",
+        "aidl/android/media/TrackInternalMuteInfo.aidl",
         "aidl/android/media/TrackSecondaryOutputInfo.aidl",
     ],
     defaults: [
@@ -357,8 +359,6 @@
         "aidl/android/media/AudioMixerBehavior.aidl",
         "aidl/android/media/AudioOffloadMode.aidl",
         "aidl/android/media/AudioPolicyDeviceState.aidl",
-        "aidl/android/media/AudioPolicyForceUse.aidl",
-        "aidl/android/media/AudioPolicyForcedConfig.aidl",
         "aidl/android/media/AudioProductStrategy.aidl",
         "aidl/android/media/AudioVolumeGroup.aidl",
         "aidl/android/media/DeviceRole.aidl",
@@ -457,6 +457,7 @@
         "latest_android_media_audio_common_types_import_interface",
     ],
     imports: [
+        "audio-permission-aidl",
         "audioclient-types-aidl",
         "audiopolicy-types-aidl",
         "capture_state_listener-aidl",
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index f729e1b..5b954f7 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -79,6 +79,17 @@
     return NO_ERROR;
 }
 
+status_t AudioRecord::logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage,
+                                                const std::string& func) {
+    if (status != NO_ERROR) {
+        if (!func.empty()) mMediaMetrics.markError(status, func.c_str());
+        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
+        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
+    }
+    mStatus = status;
+    return mStatus;
+}
+
 // ---------------------------------------------------------------------------
 
 void AudioRecord::MediaMetrics::gather(const AudioRecord *record)
@@ -246,22 +257,43 @@
     if (pid == -1 || (callingPid != myPid)) {
         adjPid = callingPid;
     }
-    mClientAttributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(adjPid));
-
+    auto clientAttributionSourcePid = legacy2aidl_pid_t_int32_t(adjPid);
+    if (!clientAttributionSourcePid.ok()) {
+        return logIfErrorAndReturnStatus(BAD_VALUE,
+                                         StringPrintf("%s: received invalid client attribution "
+                                                      "source pid, pid: %d, sessionId: %d",
+                                                      __func__, pid, sessionId),
+                                         __func__);
+    }
+    mClientAttributionSource.pid = clientAttributionSourcePid.value();
     uid_t adjUid = uid;
     if (uid == -1 || (callingPid != myPid)) {
         adjUid = IPCThreadState::self()->getCallingUid();
     }
-    mClientAttributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(adjUid));
+    auto clientAttributionSourceUid = legacy2aidl_uid_t_int32_t(adjUid);
+    if (!clientAttributionSourceUid.ok()) {
+        return logIfErrorAndReturnStatus(BAD_VALUE,
+                                         StringPrintf("%s: received invalid client attribution "
+                                                      "source uid, pid: %d, session id: %d",
+                                                      __func__, pid, sessionId),
+                                         __func__);
+    }
+    mClientAttributionSource.uid = clientAttributionSourceUid.value();
 
     mTracker.reset(new RecordingActivityTracker());
 
+    sp<IBinder> binder = defaultServiceManager()->checkService(String16("audio"));
+    if (binder != nullptr) {
+        // Barrier to ensure runtime permission update propagates to audioflinger
+        // Must be client-side
+        interface_cast<IAudioManager>(binder)->permissionUpdateBarrier();
+    }
+
     mSelectedDeviceId = selectedDeviceId;
     mSelectedMicDirection = selectedMicDirection;
     mSelectedMicFieldDimension = microphoneFieldDimension;
     mMaxSharedAudioHistoryMs = maxSharedAudioHistoryMs;
 
-    std::string errorMessage;
     // Copy the state variables early so they are available for error reporting.
     if (pAttributes == nullptr) {
         mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
@@ -304,38 +336,48 @@
         break;
     case TRANSFER_CALLBACK:
         if (callback == nullptr) {
-            errorMessage = StringPrintf(
-                    "%s: Transfer type TRANSFER_CALLBACK but callback == nullptr", __func__);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: Transfer type TRANSFER_CALLBACK but callback == nullptr, "
+                                 "pid: %d, session id: %d",
+                                 __func__, pid, sessionId),
+                    __func__);
         }
         break;
     case TRANSFER_OBTAIN:
     case TRANSFER_SYNC:
         break;
     default:
-        errorMessage = StringPrintf("%s: Invalid transfer type %d", __func__, mTransfer);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: Invalid transfer type %d, pid: %d, session id: %d", __func__,
+                             mTransfer, pid, sessionId),
+                __func__);
     }
 
     // invariant that mAudioRecord != 0 is true only after set() returns successfully
     if (mAudioRecord != 0) {
-        errorMessage = StringPrintf("%s: Track already in use", __func__);
-        status = INVALID_OPERATION;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                INVALID_OPERATION,
+                StringPrintf("%s: Track already in use, pid: %d, session id: %d", __func__, pid,
+                             sessionId),
+                __func__);
     }
 
     if (!audio_is_valid_format(mFormat)) {
-        errorMessage = StringPrintf("%s: Format %#x is not valid", __func__, mFormat);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: Format %#x is not valid, pid: %d, session id: %d", __func__,
+                             mFormat, pid, sessionId),
+                __func__);
     }
 
     if (!audio_is_input_channel(mChannelMask)) {
-        errorMessage = StringPrintf("%s: Invalid channel mask %#x", __func__, mChannelMask);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: Invalid channel mask %#x, pid: %d, session id: %d", __func__,
+                             mChannelMask, pid, sessionId),
+                __func__);
     }
 
     mChannelCount = audio_channel_count_from_in_mask(mChannelMask);
@@ -369,7 +411,8 @@
             mAudioRecordThread.clear();
         }
         // bypass error message to avoid logging twice (createRecord_l logs the error).
-        goto exit;
+        mStatus = status;
+        return mStatus;
     }
 
     // TODO: add audio hardware input latency here
@@ -385,15 +428,7 @@
     mFramesRead = 0;
     mFramesReadServerOffset = 0;
 
-error:
-    if (status != NO_ERROR) {
-        mMediaMetrics.markError(status, __FUNCTION__);
-        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
-        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
-    }
-exit:
-    mStatus = status;
-    return status;
+    return logIfErrorAndReturnStatus(status, "", __func__);
 }
 
 // -------------------------------------------------------------------------
@@ -680,6 +715,17 @@
     AutoMutex lock(mLock);
     ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
             __func__, mPortId, deviceId, mSelectedDeviceId);
+    const int64_t beginNs = systemTime();
+    mediametrics::Defer defer([&] {
+        mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_CALLERNAME,
+                     mCallerName.empty()
+                     ? AMEDIAMETRICS_PROP_CALLERNAME_VALUE_UNKNOWN
+                     : mCallerName.c_str())
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETPREFERREDDEVICE)
+                .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
+                .set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)deviceId)
+                .record(); });
 
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
@@ -783,12 +829,10 @@
     status_t status;
     static const int32_t kMaxCreateAttempts = 3;
     int32_t remainingAttempts = kMaxCreateAttempts;
-    std::string errorMessage;
 
     if (audioFlinger == 0) {
-        errorMessage = StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId);
-        status = NO_INIT;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                NO_INIT, StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId), "");
     }
 
     // mFlags (not mOrigFlags) is modified depending on whether fast request is accepted.
@@ -846,16 +890,34 @@
 
     do {
         media::CreateRecordResponse response;
-        status = audioFlinger->createRecord(VALUE_OR_FATAL(input.toAidl()), response);
-        output = VALUE_OR_FATAL(IAudioFlinger::CreateRecordOutput::fromAidl(response));
+        auto aidlInput = input.toAidl();
+        if (!aidlInput.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s(%d): Could not create record due to invalid input", __func__,
+                                 mPortId),
+                    "");
+        }
+        status = audioFlinger->createRecord(aidlInput.value(), response);
+
+        auto recordOutput = IAudioFlinger::CreateRecordOutput::fromAidl(response);
+        if (!recordOutput.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s(%d): Could not create record output due to invalid response",
+                                 __func__, mPortId),
+                    "");
+        }
+        output = recordOutput.value();
         if (status == NO_ERROR) {
             break;
         }
         if (status != FAILED_TRANSACTION || --remainingAttempts <= 0) {
-            errorMessage = StringPrintf(
-                    "%s(%d): AudioFlinger could not create record track, status: %d",
-                    __func__, mPortId, status);
-            goto exit;
+            return logIfErrorAndReturnStatus(
+                    status,
+                    StringPrintf("%s(%d): AudioFlinger could not create record track, status: %d",
+                                 __func__, mPortId, status),
+                    "");
         }
         // FAILED_TRANSACTION happens under very specific conditions causing a state mismatch
         // between audio policy manager and audio flinger during the input stream open sequence
@@ -890,9 +952,9 @@
     mHalFormat = output.halConfig.format;
 
     if (output.cblk == 0) {
-        errorMessage = StringPrintf("%s(%d): Could not get control block", __func__, mPortId);
-        status = NO_INIT;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                NO_INIT, StringPrintf("%s(%d): Could not get control block", __func__, mPortId),
+                "");
     }
     // TODO: Using unsecurePointer() has some associated security pitfalls
     //       (see declaration for details).
@@ -900,10 +962,9 @@
     //       issue (e.g. by copying).
     iMemPointer = output.cblk ->unsecurePointer();
     if (iMemPointer == NULL) {
-        errorMessage = StringPrintf(
-                "%s(%d): Could not get control block pointer", __func__, mPortId);
-        status = NO_INIT;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                NO_INIT,
+                StringPrintf("%s(%d): Could not get control block pointer", __func__, mPortId), "");
     }
     cblk = static_cast<audio_track_cblk_t*>(iMemPointer);
 
@@ -920,10 +981,9 @@
         //       issue (e.g. by copying).
         buffers = output.buffers->unsecurePointer();
         if (buffers == NULL) {
-            errorMessage = StringPrintf(
-                    "%s(%d): Could not get buffer pointer", __func__, mPortId);
-            status = NO_INIT;
-            goto exit;
+            return logIfErrorAndReturnStatus(
+                    NO_INIT,
+                    StringPrintf("%s(%d): Could not get buffer pointer", __func__, mPortId), "");
         }
     }
 
@@ -1015,15 +1075,8 @@
         .set(AMEDIAMETRICS_PROP_SELECTEDMICFIELDDIRECTION, (double)mSelectedMicFieldDimension)
         .record();
 
-exit:
-    if (status != NO_ERROR) {
-        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
-        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
-    }
-
-    mStatus = status;
     // sp<IAudioTrack> track destructor will cause releaseOutput() to be called by AudioFlinger
-    return status;
+    return logIfErrorAndReturnStatus(status, "", "");
 }
 
 // Report error associated with the event and some configuration details.
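The AudioRecord changes above replace goto-based error paths with a single logIfErrorAndReturnStatus() helper. A generic, self-contained sketch of that pattern follows; the class and member names are illustrative stand-ins, not the AudioRecord members.

// Sketch: centralize "log, store status, return" so call sites can return early.
#define LOG_TAG "ErrorPathSketch"
#include <string>
#include <utils/Errors.h>
#include <utils/Log.h>

class ErrorPathSketch {
  public:
    android::status_t configure(int sampleRate) {
        if (sampleRate <= 0) {
            return logIfErrorAndReturnStatus(
                    android::BAD_VALUE, "configure: invalid sample rate");
        }
        return logIfErrorAndReturnStatus(android::NO_ERROR, "");
    }

  private:
    android::status_t logIfErrorAndReturnStatus(android::status_t status,
                                                const std::string& errorMessage) {
        if (status != android::NO_ERROR && !errorMessage.empty()) {
            ALOGE("%s", errorMessage.c_str());
        }
        mStatus = status;  // remember the last status, as the real helper does
        return mStatus;
    }

    android::status_t mStatus = android::NO_INIT;
};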
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index aa51652..ee44074 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -60,6 +60,8 @@
 using media::audio::common::AudioMMapPolicyInfo;
 using media::audio::common::AudioMMapPolicyType;
 using media::audio::common::AudioOffloadInfo;
+using media::audio::common::AudioPolicyForceUse;
+using media::audio::common::AudioPolicyForcedConfig;
 using media::audio::common::AudioSource;
 using media::audio::common::AudioStreamType;
 using media::audio::common::AudioUsage;
@@ -322,20 +324,15 @@
     return NO_ERROR;
 }
 
-status_t AudioSystem::getStreamVolume(audio_stream_type_t stream, float* volume,
-                                      audio_io_handle_t output) {
-    if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
+status_t AudioSystem::setPortsVolume(
+        const std::vector<audio_port_handle_t>& portIds, float volume, audio_io_handle_t output) {
     const sp<IAudioFlinger> af = get_audio_flinger();
     if (af == 0) return PERMISSION_DENIED;
-    *volume = af->streamVolume(stream, output);
-    return NO_ERROR;
-}
-
-status_t AudioSystem::getStreamMute(audio_stream_type_t stream, bool* mute) {
-    if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger> af = get_audio_flinger();
-    if (af == 0) return PERMISSION_DENIED;
-    *mute = af->streamMute(stream);
+    std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<int32_t>>(
+                    portIds, legacy2aidl_audio_port_handle_t_int32_t));
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    af->setPortsVolume(portIdsAidl, volume, outputAidl);
     return NO_ERROR;
 }
 
@@ -1059,9 +1056,9 @@
     if (aps == 0) return AUDIO_POLICY_FORCE_NONE;
 
     auto result = [&]() -> ConversionResult<audio_policy_forced_cfg_t> {
-        media::AudioPolicyForceUse usageAidl = VALUE_OR_RETURN(
+        AudioPolicyForceUse usageAidl = VALUE_OR_RETURN(
                 legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(usage));
-        media::AudioPolicyForcedConfig configAidl;
+        AudioPolicyForcedConfig configAidl;
         RETURN_IF_ERROR(statusTFromBinderStatus(
                 aps->getForceUse(usageAidl, &configAidl)));
         return aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(configAidl);
@@ -1098,7 +1095,8 @@
                                        audio_port_handle_t* portId,
                                        std::vector<audio_io_handle_t>* secondaryOutputs,
                                        bool *isSpatialized,
-                                       bool *isBitPerfect) {
+                                       bool *isBitPerfect,
+                                       float *volume) {
     if (attr == nullptr) {
         ALOGE("%s NULL audio attributes", __func__);
         return BAD_VALUE;
@@ -1164,6 +1162,7 @@
     *isBitPerfect = responseAidl.isBitPerfect;
     *attr = VALUE_OR_RETURN_STATUS(
             aidl2legacy_AudioAttributes_audio_attributes_t(responseAidl.attr));
+    *volume = responseAidl.volume;
 
     return OK;
 }
@@ -1289,6 +1288,21 @@
     (void) status;
 }
 
+status_t AudioSystem::setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                     const char *address,
+                                                     bool enabled,
+                                                     audio_stream_type_t streamToDriveAbs) {
+    const sp<IAudioPolicyService> aps = get_audio_policy_service();
+    if (aps == nullptr) return PERMISSION_DENIED;
+
+    AudioDevice deviceAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_device_AudioDevice(deviceType, address));
+    AudioStreamType streamToDriveAbsAidl = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_stream_type_t_AudioStreamType(streamToDriveAbs));
+    return statusTFromBinderStatus(
+            aps->setDeviceAbsoluteVolumeEnabled(deviceAidl, enabled, streamToDriveAbsAidl));
+}
+
 status_t AudioSystem::initStreamVolume(audio_stream_type_t stream,
                                        int indexMin,
                                        int indexMax) {
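A caller-side sketch of the new per-port volume entry point introduced above; the handles are placeholders that would normally come from stream creation, and the [0.0, 1.0] volume range is assumed from the surrounding volume APIs.

// Sketch: apply one volume to a group of ports mixed into the same output.
#include <vector>
#include <media/AudioSystem.h>

android::status_t setVolumeForPorts(const std::vector<audio_port_handle_t>& ports,
                                    audio_io_handle_t output, float volume) {
    return android::AudioSystem::setPortsVolume(ports, volume, output);
}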
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 5d96d8e..d7c0b5b 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -188,6 +188,14 @@
     return result.value_or(false);
 }
 
+status_t AudioTrack::logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage) {
+    if (status != NO_ERROR) {
+        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
+        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
+    }
+    mStatus = status;
+    return mStatus;
+}
 // ---------------------------------------------------------------------------
 
 void AudioTrack::MediaMetrics::gather(const AudioTrack *track)
@@ -319,13 +327,6 @@
         const audio_attributes_t* pAttributes,
         bool doNotReconnect,
         float maxRequiredSpeed)
-    : mStatus(NO_INIT),
-      mState(STATE_STOPPED),
-      mPreviousPriority(ANDROID_PRIORITY_NORMAL),
-      mPreviousSchedulingGroup(SP_DEFAULT),
-      mPausedPosition(0),
-      mSelectedDeviceId(AUDIO_PORT_HANDLE_NONE),
-      mAudioTrackCallback(new AudioTrackCallback())
 {
     mAttributes = AUDIO_ATTRIBUTES_INITIALIZER;
 
@@ -372,6 +373,10 @@
                 mSessionId, IPCThreadState::self()->getCallingPid(), clientPid);
         AudioSystem::releaseAudioSessionId(mSessionId, clientPid);
     }
+
+    if (mOutput != AUDIO_IO_HANDLE_NONE) {
+        AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
+    }
 }
 
 void AudioTrack::stopAndJoinCallbacks() {
@@ -420,9 +425,16 @@
     uint32_t channelCount;
     pid_t callingPid;
     pid_t myPid;
-    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
-    pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
-    std::string errorMessage;
+    auto uid = aidl2legacy_int32_t_uid_t(attributionSource.uid);
+    auto pid = aidl2legacy_int32_t_pid_t(attributionSource.pid);
+    if (!uid.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s: received invalid attribution source uid", __func__));
+    }
+    if (!pid.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s: received invalid attribution source pid", __func__));
+    }
     // Note mPortId is not valid until the track is created, so omit mPortId in ALOG for set.
     ALOGV("%s(): streamType %d, sampleRate %u, format %#x, channelMask %#x, frameCount %zu, "
           "flags %#x, notificationFrames %d, sessionId %d, transferType %d, uid %d, pid %d",
@@ -493,34 +505,33 @@
     case TRANSFER_CALLBACK:
     case TRANSFER_SYNC_NOTIF_CALLBACK:
         if (callback == nullptr || sharedBuffer != 0) {
-            errorMessage = StringPrintf(
-                    "%s: Transfer type %s but callback == nullptr || sharedBuffer != 0",
-                    convertTransferToText(transferType), __func__);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf(
+                            "%s: Transfer type %s but callback == nullptr || sharedBuffer != 0",
+                            convertTransferToText(transferType), __func__));
         }
         break;
     case TRANSFER_OBTAIN:
     case TRANSFER_SYNC:
         if (sharedBuffer != 0) {
-            errorMessage = StringPrintf(
-                    "%s: Transfer type TRANSFER_OBTAIN but sharedBuffer != 0", __func__);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: Transfer type TRANSFER_OBTAIN but sharedBuffer != 0",
+                                 __func__));
         }
         break;
     case TRANSFER_SHARED:
         if (sharedBuffer == 0) {
-            errorMessage = StringPrintf(
-                    "%s: Transfer type TRANSFER_SHARED but sharedBuffer == 0", __func__);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: Transfer type TRANSFER_SHARED but sharedBuffer == 0",
+                                 __func__));
         }
         break;
     default:
-        errorMessage = StringPrintf("%s: Invalid transfer type %d", __func__, transferType);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s: Invalid transfer type %d", __func__, transferType));
     }
     mSharedBuffer = sharedBuffer;
     mTransfer = transferType;
@@ -531,9 +542,8 @@
 
     // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
-        errorMessage = StringPrintf("%s: Track already in use", __func__);
-        status = INVALID_OPERATION;
-        goto error;
+        return logIfErrorAndReturnStatus(INVALID_OPERATION,
+                                         StringPrintf("%s: Track already in use", __func__));
     }
 
     // handle default values first.
@@ -542,9 +552,8 @@
     }
     if (pAttributes == NULL) {
         if (uint32_t(streamType) >= AUDIO_STREAM_PUBLIC_CNT) {
-            errorMessage = StringPrintf("%s: Invalid stream type %d", __func__, streamType);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE, StringPrintf("%s: Invalid stream type %d", __func__, streamType));
         }
         mOriginalStreamType = streamType;
     } else {
@@ -553,15 +562,13 @@
 
     // validate parameters
     if (!audio_is_valid_format(format)) {
-        errorMessage = StringPrintf("%s: Invalid format %#x", __func__, format);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(BAD_VALUE,
+                                         StringPrintf("%s: Invalid format %#x", __func__, format));
     }
 
     if (!audio_is_output_channel(channelMask)) {
-        errorMessage = StringPrintf("%s: Invalid channel mask %#x",  __func__, channelMask);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s: Invalid channel mask %#x", __func__, channelMask));
     }
     channelCount = audio_channel_count_from_out_mask(channelMask);
     mChannelCount = channelCount;
@@ -576,10 +583,9 @@
 
     // sampling rate must be specified for direct outputs
     if (sampleRate == 0 && (mFlags & AUDIO_OUTPUT_FLAG_DIRECT) != 0) {
-        errorMessage = StringPrintf(
-                "%s: sample rate must be specified for direct outputs", __func__);
-        status = BAD_VALUE;
-        goto error;
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: sample rate must be specified for direct outputs", __func__));
     }
     // 1.0 <= mMaxRequiredSpeed <= AUDIO_TIMESTRETCH_SPEED_MAX
     mMaxRequiredSpeed = min(max(maxRequiredSpeed, 1.0f), AUDIO_TIMESTRETCH_SPEED_MAX);
@@ -607,17 +613,16 @@
         mNotificationsPerBufferReq = 0;
     } else {
         if (!(mFlags & AUDIO_OUTPUT_FLAG_FAST)) {
-            errorMessage = StringPrintf(
-                    "%s: notificationFrames=%d not permitted for non-fast track",
-                    __func__, notificationFrames);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: notificationFrames=%d not permitted for non-fast track",
+                                 __func__, notificationFrames));
         }
         if (frameCount > 0) {
-            ALOGE("%s(): notificationFrames=%d not permitted with non-zero frameCount=%zu",
-                    __func__, notificationFrames, frameCount);
-            status = BAD_VALUE;
-            goto error;
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE, StringPrintf("%s(): notificationFrames=%d not permitted "
+                                            "with non-zero frameCount=%zu",
+                                            __func__, notificationFrames, frameCount));
         }
         mNotificationFramesReq = 0;
         const uint32_t minNotificationsPerBuffer = 1;
@@ -634,12 +639,24 @@
     mClientAttributionSource = AttributionSourceState(attributionSource);
     callingPid = IPCThreadState::self()->getCallingPid();
     myPid = getpid();
-    if (uid == -1 || (callingPid != myPid)) {
-        mClientAttributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(
-            IPCThreadState::self()->getCallingUid()));
+    if (uid.value() == -1 || (callingPid != myPid)) {
+        auto clientAttributionSourceUid =
+                legacy2aidl_uid_t_int32_t(IPCThreadState::self()->getCallingUid());
+        if (!clientAttributionSourceUid.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: received invalid client attribution source uid", __func__));
+        }
+        mClientAttributionSource.uid = clientAttributionSourceUid.value();
     }
-    if (pid == (pid_t)-1 || (callingPid != myPid)) {
-        mClientAttributionSource.pid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(callingPid));
+    if (pid.value() == (pid_t)-1 || (callingPid != myPid)) {
+        auto clientAttributionSourcePid = legacy2aidl_uid_t_int32_t(callingPid);
+        if (!clientAttributionSourcePid.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s: received invalid client attribution source pid", __func__));
+        }
+        mClientAttributionSource.pid = clientAttributionSourcePid.value();
     }
     mAuxEffectId = 0;
     mCallback = callback;
@@ -662,7 +679,8 @@
             mAudioTrackThread.clear();
         }
         // We do not goto error to prevent double-logging errors.
-        goto exit;
+        mStatus = status;
+        return mStatus;
     }
 
     mLoopCount = 0;
@@ -677,7 +695,7 @@
     mReleased = 0;
     mStartNs = 0;
     mStartFromZeroUs = 0;
-    AudioSystem::acquireAudioSessionId(mSessionId, pid, uid);
+    AudioSystem::acquireAudioSessionId(mSessionId, pid.value(), uid.value());
     mSequence = 1;
     mObservedSequence = mSequence;
     mInUnderrun = false;
@@ -695,15 +713,7 @@
     mFramesWrittenAtRestore = -1; // -1 is a unique initializer.
     mVolumeHandler = new media::VolumeHandler();
 
-error:
-    if (status != NO_ERROR) {
-        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
-        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
-    }
-    // fall through
-exit:
-    mStatus = status;
-    return status;
+    return logIfErrorAndReturnStatus(status, "");
 }
 
 
@@ -730,8 +740,22 @@
         audio_port_handle_t selectedDeviceId)
 {
     AttributionSourceState attributionSource;
-    attributionSource.uid = VALUE_OR_FATAL(legacy2aidl_uid_t_int32_t(uid));
-    attributionSource.pid = VALUE_OR_FATAL(legacy2aidl_pid_t_int32_t(pid));
+    auto attributionSourceUid = legacy2aidl_uid_t_int32_t(uid);
+    if (!attributionSourceUid.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: received invalid attribution source uid, uid: %d, session id: %d",
+                             __func__, uid, sessionId));
+    }
+    attributionSource.uid = attributionSourceUid.value();
+    auto attributionSourcePid = legacy2aidl_pid_t_int32_t(pid);
+    if (!attributionSourcePid.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE,
+                StringPrintf("%s: received invalid attribution source pid, pid: %d, sessionId: %d",
+                             __func__, pid, sessionId));
+    }
+    attributionSource.pid = attributionSourcePid.value();
     attributionSource.token = sp<BBinder>::make();
     if (callback) {
         mLegacyCallbackWrapper = sp<LegacyCallbackWrapper>::make(callback, user);
@@ -1174,6 +1198,13 @@
     mSampleRate = rate;
     mProxy->setSampleRate(effectiveSampleRate);
 
+    mediametrics::LogItem(mMetricsId)
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETSAMPLERATE)
+            .set(AMEDIAMETRICS_PROP_PREFIX_EFFECTIVE AMEDIAMETRICS_PROP_SAMPLERATE,
+                    static_cast<int32_t>(effectiveSampleRate))
+            .set(AMEDIAMETRICS_PROP_SAMPLERATE, static_cast<int32_t>(rate))
+            .record();
+
     return NO_ERROR;
 }
 
@@ -1676,18 +1707,30 @@
     AutoMutex lock(mLock);
     ALOGV("%s(%d): deviceId=%d mSelectedDeviceId=%d",
             __func__, mPortId, deviceId, mSelectedDeviceId);
+    const int64_t beginNs = systemTime();
+    mediametrics::Defer defer([&] {
+        mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_CALLERNAME,
+                     mCallerName.empty()
+                     ? AMEDIAMETRICS_PROP_CALLERNAME_VALUE_UNKNOWN
+                     : mCallerName.c_str())
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETPREFERREDDEVICE)
+                .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
+                .set(AMEDIAMETRICS_PROP_SELECTEDDEVICEID, (int32_t)deviceId)
+                .record(); });
+
     if (mSelectedDeviceId != deviceId) {
         mSelectedDeviceId = deviceId;
         if (mStatus == NO_ERROR) {
             if (isOffloadedOrDirect_l()) {
-                if (mState == STATE_STOPPED || mState == STATE_FLUSHED) {
-                    ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
-                    result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
-                } else {
+                if (isPlaying_l()) {
                     ALOGW("%s(%d). Offloaded or Direct track is not STOPPED or FLUSHED. "
                           "State: %s.",
                             __func__, mPortId, stateToString(mState));
                     result = INVALID_OPERATION;
+                } else {
+                    ALOGD("%s(%d): creating a new AudioTrack", __func__, mPortId);
+                    result = restoreTrack_l("setOutputDevice", true /* forceRestore */);
                 }
             } else {
                 // allow track invalidation when track is not playing to propagate
@@ -1792,15 +1835,11 @@
 status_t AudioTrack::createTrack_l()
 {
     status_t status;
-    bool callbackAdded = false;
-    std::string errorMessage;
 
     const sp<IAudioFlinger>& audioFlinger = AudioSystem::get_audio_flinger();
     if (audioFlinger == 0) {
-        errorMessage = StringPrintf("%s(%d): Could not get audioflinger",
-                __func__, mPortId);
-        status = DEAD_OBJECT;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                DEAD_OBJECT, StringPrintf("%s(%d): Could not get audioflinger", __func__, mPortId));
     }
 
     {
@@ -1868,21 +1907,31 @@
     input.audioTrackCallback = mAudioTrackCallback;
 
     media::CreateTrackResponse response;
-    status = audioFlinger->createTrack(VALUE_OR_FATAL(input.toAidl()), response);
+    auto aidlInput = input.toAidl();
+    if (!aidlInput.ok()) {
+        return logIfErrorAndReturnStatus(
+                BAD_VALUE, StringPrintf("%s(%d): Could not create track due to invalid input",
+                                        __func__, mPortId));
+    }
+    status = audioFlinger->createTrack(aidlInput.value(), response);
 
     IAudioFlinger::CreateTrackOutput output{};
     if (status == NO_ERROR) {
-        output = VALUE_OR_FATAL(IAudioFlinger::CreateTrackOutput::fromAidl(response));
+        auto trackOutput = IAudioFlinger::CreateTrackOutput::fromAidl(response);
+        if (!trackOutput.ok()) {
+            return logIfErrorAndReturnStatus(
+                    BAD_VALUE,
+                    StringPrintf("%s(%d): Could not create track output due to invalid response",
+                                 __func__, mPortId));
+        }
+        output = trackOutput.value();
     }
 
     if (status != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
-        errorMessage = StringPrintf(
-                "%s(%d): AudioFlinger could not create track, status: %d output %d",
-                __func__, mPortId, status, output.outputId);
-        if (status == NO_ERROR) {
-            status = INVALID_OPERATION; // device not ready
-        }
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                status == NO_ERROR ? INVALID_OPERATION : status,  // device not ready
+                StringPrintf("%s(%d): AudioFlinger could not create track, status: %d output %d",
+                             __func__, mPortId, status, output.outputId));
     }
     ALOG_ASSERT(output.audioTrack != 0);
 
@@ -1912,22 +1961,22 @@
     // FIXME compare to AudioRecord
     std::optional<media::SharedFileRegion> sfr;
     output.audioTrack->getCblk(&sfr);
-    sp<IMemory> iMem = VALUE_OR_FATAL(aidl2legacy_NullableSharedFileRegion_IMemory(sfr));
-    if (iMem == 0) {
-        errorMessage = StringPrintf("%s(%d): Could not get control block", __func__, mPortId);
-        status = FAILED_TRANSACTION;
-        goto exit;
+    auto iMemory = aidl2legacy_NullableSharedFileRegion_IMemory(sfr);
+    if (!iMemory.ok() || iMemory.value() == 0) {
+        return logIfErrorAndReturnStatus(
+                FAILED_TRANSACTION,
+                StringPrintf("%s(%d): Could not get control block", __func__, mPortId));
     }
+    sp<IMemory> iMem = iMemory.value();
     // TODO: Using unsecurePointer() has some associated security pitfalls
     //       (see declaration for details).
     //       Either document why it is safe in this case or address the
     //       issue (e.g. by copying).
     void *iMemPointer = iMem->unsecurePointer();
     if (iMemPointer == NULL) {
-        errorMessage = StringPrintf(
-                "%s(%d): Could not get control block pointer", __func__, mPortId);
-        status = FAILED_TRANSACTION;
-        goto exit;
+        return logIfErrorAndReturnStatus(
+                FAILED_TRANSACTION,
+                StringPrintf("%s(%d): Could not get control block pointer", __func__, mPortId));
     }
     // invariant that mAudioTrack != 0 is true only after set() returns successfully
     if (mAudioTrack != 0) {
@@ -1962,7 +2011,6 @@
             AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
         }
         AudioSystem::addAudioDeviceCallback(this, output.outputId, output.portId);
-        callbackAdded = true;
     }
 
     mPortId = output.portId;
@@ -1987,11 +2035,9 @@
         //       issue (e.g. by copying).
         buffers = mSharedBuffer->unsecurePointer();
         if (buffers == NULL) {
-            errorMessage = StringPrintf(
-                    "%s(%d): Could not get buffer pointer", __func__, mPortId);
-            ALOGE("%s", errorMessage.c_str());
-            status = FAILED_TRANSACTION;
-            goto exit;
+            return logIfErrorAndReturnStatus(
+                    FAILED_TRANSACTION,
+                    StringPrintf("%s(%d): Could not get buffer pointer", __func__, mPortId));
         }
     }
 
@@ -2088,19 +2134,8 @@
 
     }
 
-exit:
-    if (status != NO_ERROR) {
-        if (callbackAdded) {
-            // note: mOutput is always valid is callbackAdded is true
-            AudioSystem::removeAudioDeviceCallback(this, mOutput, mPortId);
-        }
-        ALOGE_IF(!errorMessage.empty(), "%s", errorMessage.c_str());
-        reportError(status, AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE, errorMessage.c_str());
-    }
-    mStatus = status;
-
     // sp<IAudioTrack> track destructor will cause releaseOutput() to be called by AudioFlinger
-    return status;
+    return logIfErrorAndReturnStatus(status, "");
 }
 
 void AudioTrack::reportError(status_t status, const char *event, const char *message) const
@@ -2401,12 +2436,14 @@
     int32_t flags = android_atomic_and(
         ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END), &mCblk->mFlags);
 
+    const bool isOffloaded = isOffloaded_l();
+    const bool isOffloadedOrDirect = isOffloadedOrDirect_l();
     // Check for track invalidation
     if (flags & CBLK_INVALID) {
         // for offloaded tracks restoreTrack_l() will just update the sequence and clear
         // AudioSystem cache. We should not exit here but after calling the callback so
         // that the upper layers can recreate the track
-        if (!isOffloadedOrDirect_l() || (mSequence == mObservedSequence)) {
+        if (!isOffloadedOrDirect || (mSequence == mObservedSequence)) {
             status_t status __unused = restoreTrack_l("processAudioBuffer");
             // FIXME unused status
             // after restoration, continue below to make sure that the loop and buffer events
@@ -2576,7 +2613,7 @@
         mObservedSequence = sequence;
         callback->onNewIAudioTrack();
         // for offloaded tracks, just wait for the upper layers to recreate the track
-        if (isOffloadedOrDirect()) {
+        if (isOffloadedOrDirect) {
             return NS_INACTIVE;
         }
     }
@@ -2664,7 +2701,7 @@
                 __func__, mPortId, mRemainingFrames, avail, audioBuffer.frameCount, nonContig, err);
         if (err != NO_ERROR) {
             if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR ||
-                    (isOffloaded() && (err == DEAD_OBJECT))) {
+                    (isOffloaded && (err == DEAD_OBJECT))) {
                 // FIXME bug 25195759
                 return 1000000;
             }
@@ -2750,7 +2787,7 @@
             // buffer size and skip the loop entirely.
 
             nsecs_t myns;
-            if (audio_has_proportional_frames(mFormat)) {
+            if (!isOffloaded && audio_has_proportional_frames(mFormat)) {
                 // time to wait based on buffer occupancy
                 const nsecs_t datans = mRemainingFrames <= avail ? 0 :
                         framesToNanoseconds(mRemainingFrames - avail, sampleRate, speed);
@@ -3035,6 +3072,7 @@
         const sp<VolumeShaper::Configuration>& configuration,
         const sp<VolumeShaper::Operation>& operation)
 {
+    const int64_t beginNs = systemTime();
     AutoMutex lock(mLock);
     mVolumeHandler->setIdIfNecessary(configuration);
     media::VolumeShaperConfiguration config;
@@ -3042,6 +3080,18 @@
     media::VolumeShaperOperation op;
     operation->writeToParcelable(&op);
     VolumeShaper::Status status;
+
+    mediametrics::Defer defer([&] {
+        mediametrics::LogItem(mMetricsId)
+                .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_APPLYVOLUMESHAPER)
+                .set(AMEDIAMETRICS_PROP_EXECUTIONTIMENS, (int64_t)(systemTime() - beginNs))
+                .set(AMEDIAMETRICS_PROP_STATE, stateToString(mState))
+                .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)status)
+                .set(AMEDIAMETRICS_PROP_TOSTRING, configuration->toString()
+                                 .append(" ")
+                                 .append(operation->toString()))
+                .record(); });
+
     mAudioTrack->applyVolumeShaper(config, op, &status);
 
     if (status == DEAD_OBJECT) {
@@ -3176,7 +3226,12 @@
         media::AudioTimestampInternal ts;
         mAudioTrack->getTimestamp(&ts, &status);
         if (status == OK) {
-            timestamp = VALUE_OR_FATAL(aidl2legacy_AudioTimestampInternal_AudioTimestamp(ts));
+            auto legacyTs = aidl2legacy_AudioTimestampInternal_AudioTimestamp(ts);
+            if (!legacyTs.ok()) {
+                return logIfErrorAndReturnStatus(
+                        BAD_VALUE, StringPrintf("%s: received invalid audio timestamp", __func__));
+            }
+            timestamp = legacyTs.value();
         }
     } else {
         // read timestamp from shared memory
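The getTimestamp hunk above swaps VALUE_OR_FATAL for an explicit check of the conversion result, so a malformed parcelable now returns an error to the caller instead of aborting the process. A hedged sketch of the general pattern, with placeholder names, is:

    // Sketch only: aidl2legacy_Foo and handleError stand in for whatever
    // ConversionResult-returning helper and error path a given call site uses.
    auto conv = aidl2legacy_Foo(aidlValue);       // ConversionResult<LegacyFoo>
    if (!conv.ok()) {
        return handleError(BAD_VALUE, "received invalid value over binder");
    }
    const LegacyFoo legacy = conv.value();        // safe to use from here on
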
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index aa5c840..9241973 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -350,32 +350,13 @@
     return statusTFromBinderStatus(mDelegate->setStreamMute(streamAidl, muted));
 }
 
-float AudioFlingerClientAdapter::streamVolume(audio_stream_type_t stream,
-                                              audio_io_handle_t output) const {
-    auto result = [&]() -> ConversionResult<float> {
-        AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
-                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-        int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
-        float aidlRet;
-        RETURN_IF_ERROR(statusTFromBinderStatus(
-                mDelegate->streamVolume(streamAidl, outputAidl, &aidlRet)));
-        return aidlRet;
-    }();
-    // Failure is ignored.
-    return result.value_or(0.f);
-}
-
-bool AudioFlingerClientAdapter::streamMute(audio_stream_type_t stream) const {
-    auto result = [&]() -> ConversionResult<bool> {
-        AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
-                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-        bool aidlRet;
-        RETURN_IF_ERROR(statusTFromBinderStatus(
-                mDelegate->streamMute(streamAidl, &aidlRet)));
-        return aidlRet;
-    }();
-    // Failure is ignored.
-    return result.value_or(false);
+status_t AudioFlingerClientAdapter::setPortsVolume(
+        const std::vector<audio_port_handle_t>& portIds, float volume, audio_io_handle_t output) {
+    std::vector<int32_t> portIdsAidl = VALUE_OR_RETURN_STATUS(
+            convertContainer<std::vector<int32_t>>(
+                    portIds, legacy2aidl_audio_port_handle_t_int32_t));
+    int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
+    return statusTFromBinderStatus(mDelegate->setPortsVolume(portIdsAidl, volume, outputAidl));
 }
 
 status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
@@ -918,6 +899,16 @@
     return OK;
 }
 
+status_t AudioFlingerClientAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMuted) {
+    return statusTFromBinderStatus(mDelegate->setTracksInternalMute(tracksInternalMuted));
+}
+
+status_t AudioFlingerClientAdapter::resetReferencesForTest() {
+    RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mDelegate->resetReferencesForTest()));
+    return OK;
+}
+
 ////////////////////////////////////////////////////////////////////////////////////////////////////
 // AudioFlingerServerAdapter
 AudioFlingerServerAdapter::AudioFlingerServerAdapter(
@@ -1030,21 +1021,14 @@
     return Status::fromStatusT(mDelegate->setStreamMute(streamLegacy, muted));
 }
 
-Status AudioFlingerServerAdapter::streamVolume(AudioStreamType stream, int32_t output,
-                                               float* _aidl_return) {
-    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
+Status AudioFlingerServerAdapter::setPortsVolume(
+        const std::vector<int32_t>& portIds, float volume, int32_t output) {
+    std::vector<audio_port_handle_t> portIdsLegacy = VALUE_OR_RETURN_BINDER(
+            convertContainer<std::vector<audio_port_handle_t>>(
+                    portIds, aidl2legacy_int32_t_audio_port_handle_t));
     audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
             aidl2legacy_int32_t_audio_io_handle_t(output));
-    *_aidl_return = mDelegate->streamVolume(streamLegacy, outputLegacy);
-    return Status::ok();
-}
-
-Status AudioFlingerServerAdapter::streamMute(AudioStreamType stream, bool* _aidl_return) {
-    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
-    *_aidl_return = mDelegate->streamMute(streamLegacy);
-    return Status::ok();
+    return Status::fromStatusT(mDelegate->setPortsVolume(portIdsLegacy, volume, outputLegacy));
 }
 
 Status AudioFlingerServerAdapter::setMode(AudioMode mode) {
@@ -1477,4 +1461,14 @@
     return Status::ok();
 }
 
+Status AudioFlingerServerAdapter::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    return Status::fromStatusT(mDelegate->setTracksInternalMute(tracksInternalMute));
+}
+
+Status AudioFlingerServerAdapter::resetReferencesForTest() {
+    RETURN_BINDER_IF_ERROR(mDelegate->resetReferencesForTest());
+    return Status::ok();
+}
+
 } // namespace android
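With streamVolume() and streamMute() removed from the adapter, per-track volume now flows through setPortsVolume(). A hedged usage sketch, assuming a connected sp<IAudioFlinger> named af and illustrative port handles, is:

    // 'af', 'kPortA', 'kPortB' and 'output' are assumptions for illustration only.
    std::vector<audio_port_handle_t> ports = {kPortA, kPortB};
    const float volume = 0.5f;                                   // linear gain
    status_t res = af->setPortsVolume(ports, volume, output);    // target io handle
    if (res != NO_ERROR) {
        ALOGW("setPortsVolume failed: %d", res);
    }
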
diff --git a/media/libaudioclient/PolicyAidlConversion.cpp b/media/libaudioclient/PolicyAidlConversion.cpp
index 441e329..a414cb7 100644
--- a/media/libaudioclient/PolicyAidlConversion.cpp
+++ b/media/libaudioclient/PolicyAidlConversion.cpp
@@ -296,134 +296,6 @@
     return unexpected(BAD_VALUE);
 }
 
-ConversionResult<audio_policy_force_use_t>
-aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(media::AudioPolicyForceUse aidl) {
-    switch (aidl) {
-        case media::AudioPolicyForceUse::COMMUNICATION:
-            return AUDIO_POLICY_FORCE_FOR_COMMUNICATION;
-        case media::AudioPolicyForceUse::MEDIA:
-            return AUDIO_POLICY_FORCE_FOR_MEDIA;
-        case media::AudioPolicyForceUse::RECORD:
-            return AUDIO_POLICY_FORCE_FOR_RECORD;
-        case media::AudioPolicyForceUse::DOCK:
-            return AUDIO_POLICY_FORCE_FOR_DOCK;
-        case media::AudioPolicyForceUse::SYSTEM:
-            return AUDIO_POLICY_FORCE_FOR_SYSTEM;
-        case media::AudioPolicyForceUse::HDMI_SYSTEM_AUDIO:
-            return AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO;
-        case media::AudioPolicyForceUse::ENCODED_SURROUND:
-            return AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND;
-        case media::AudioPolicyForceUse::VIBRATE_RINGING:
-            return AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<media::AudioPolicyForceUse>
-legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy) {
-    switch (legacy) {
-        case AUDIO_POLICY_FORCE_FOR_COMMUNICATION:
-            return media::AudioPolicyForceUse::COMMUNICATION;
-        case AUDIO_POLICY_FORCE_FOR_MEDIA:
-            return media::AudioPolicyForceUse::MEDIA;
-        case AUDIO_POLICY_FORCE_FOR_RECORD:
-            return media::AudioPolicyForceUse::RECORD;
-        case AUDIO_POLICY_FORCE_FOR_DOCK:
-            return media::AudioPolicyForceUse::DOCK;
-        case AUDIO_POLICY_FORCE_FOR_SYSTEM:
-            return media::AudioPolicyForceUse::SYSTEM;
-        case AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO:
-            return media::AudioPolicyForceUse::HDMI_SYSTEM_AUDIO;
-        case AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND:
-            return media::AudioPolicyForceUse::ENCODED_SURROUND;
-        case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
-            return media::AudioPolicyForceUse::VIBRATE_RINGING;
-        case AUDIO_POLICY_FORCE_USE_CNT:
-            break;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<audio_policy_forced_cfg_t>
-aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(media::AudioPolicyForcedConfig aidl) {
-    switch (aidl) {
-        case media::AudioPolicyForcedConfig::NONE:
-            return AUDIO_POLICY_FORCE_NONE;
-        case media::AudioPolicyForcedConfig::SPEAKER:
-            return AUDIO_POLICY_FORCE_SPEAKER;
-        case media::AudioPolicyForcedConfig::HEADPHONES:
-            return AUDIO_POLICY_FORCE_HEADPHONES;
-        case media::AudioPolicyForcedConfig::BT_SCO:
-            return AUDIO_POLICY_FORCE_BT_SCO;
-        case media::AudioPolicyForcedConfig::BT_A2DP:
-            return AUDIO_POLICY_FORCE_BT_A2DP;
-        case media::AudioPolicyForcedConfig::WIRED_ACCESSORY:
-            return AUDIO_POLICY_FORCE_WIRED_ACCESSORY;
-        case media::AudioPolicyForcedConfig::BT_CAR_DOCK:
-            return AUDIO_POLICY_FORCE_BT_CAR_DOCK;
-        case media::AudioPolicyForcedConfig::BT_DESK_DOCK:
-            return AUDIO_POLICY_FORCE_BT_DESK_DOCK;
-        case media::AudioPolicyForcedConfig::ANALOG_DOCK:
-            return AUDIO_POLICY_FORCE_ANALOG_DOCK;
-        case media::AudioPolicyForcedConfig::DIGITAL_DOCK:
-            return AUDIO_POLICY_FORCE_DIGITAL_DOCK;
-        case media::AudioPolicyForcedConfig::NO_BT_A2DP:
-            return AUDIO_POLICY_FORCE_NO_BT_A2DP;
-        case media::AudioPolicyForcedConfig::SYSTEM_ENFORCED:
-            return AUDIO_POLICY_FORCE_SYSTEM_ENFORCED;
-        case media::AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED:
-            return AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED;
-        case media::AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER:
-            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER;
-        case media::AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS:
-            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS;
-        case media::AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL:
-            return AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL;
-    }
-    return unexpected(BAD_VALUE);
-}
-
-ConversionResult<media::AudioPolicyForcedConfig>
-legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy) {
-    switch (legacy) {
-        case AUDIO_POLICY_FORCE_NONE:
-            return media::AudioPolicyForcedConfig::NONE;
-        case AUDIO_POLICY_FORCE_SPEAKER:
-            return media::AudioPolicyForcedConfig::SPEAKER;
-        case AUDIO_POLICY_FORCE_HEADPHONES:
-            return media::AudioPolicyForcedConfig::HEADPHONES;
-        case AUDIO_POLICY_FORCE_BT_SCO:
-            return media::AudioPolicyForcedConfig::BT_SCO;
-        case AUDIO_POLICY_FORCE_BT_A2DP:
-            return media::AudioPolicyForcedConfig::BT_A2DP;
-        case AUDIO_POLICY_FORCE_WIRED_ACCESSORY:
-            return media::AudioPolicyForcedConfig::WIRED_ACCESSORY;
-        case AUDIO_POLICY_FORCE_BT_CAR_DOCK:
-            return media::AudioPolicyForcedConfig::BT_CAR_DOCK;
-        case AUDIO_POLICY_FORCE_BT_DESK_DOCK:
-            return media::AudioPolicyForcedConfig::BT_DESK_DOCK;
-        case AUDIO_POLICY_FORCE_ANALOG_DOCK:
-            return media::AudioPolicyForcedConfig::ANALOG_DOCK;
-        case AUDIO_POLICY_FORCE_DIGITAL_DOCK:
-            return media::AudioPolicyForcedConfig::DIGITAL_DOCK;
-        case AUDIO_POLICY_FORCE_NO_BT_A2DP:
-            return media::AudioPolicyForcedConfig::NO_BT_A2DP;
-        case AUDIO_POLICY_FORCE_SYSTEM_ENFORCED:
-            return media::AudioPolicyForcedConfig::SYSTEM_ENFORCED;
-        case AUDIO_POLICY_FORCE_HDMI_SYSTEM_AUDIO_ENFORCED:
-            return media::AudioPolicyForcedConfig::HDMI_SYSTEM_AUDIO_ENFORCED;
-        case AUDIO_POLICY_FORCE_ENCODED_SURROUND_NEVER:
-            return media::AudioPolicyForcedConfig::ENCODED_SURROUND_NEVER;
-        case AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS:
-            return media::AudioPolicyForcedConfig::ENCODED_SURROUND_ALWAYS;
-        case AUDIO_POLICY_FORCE_ENCODED_SURROUND_MANUAL:
-            return media::AudioPolicyForcedConfig::ENCODED_SURROUND_MANUAL;
-        case AUDIO_POLICY_FORCE_CFG_CNT:
-            break;
-    }
-    return unexpected(BAD_VALUE);
-}
-
 ConversionResult<device_role_t>
 aidl2legacy_DeviceRole_device_role_t(media::DeviceRole aidl) {
     switch (aidl) {
diff --git a/media/libaudioclient/TEST_MAPPING b/media/libaudioclient/TEST_MAPPING
index 68dba34..29b876c 100644
--- a/media/libaudioclient/TEST_MAPPING
+++ b/media/libaudioclient/TEST_MAPPING
@@ -47,12 +47,7 @@
       "name": "audioeffect_analysis"
     },
     {
-      "name": "CtsVirtualDevicesTestCases",
-      "options" : [
-        {
-          "include-filter": "android.virtualdevice.cts.VirtualAudioTest"
-        }
-      ]
+      "name": "CtsVirtualDevicesAudioTestCases"
     }
   ]
 }
diff --git a/media/libaudioclient/ToneGenerator.cpp b/media/libaudioclient/ToneGenerator.cpp
index e213f08..d325d0a 100644
--- a/media/libaudioclient/ToneGenerator.cpp
+++ b/media/libaudioclient/ToneGenerator.cpp
@@ -1033,17 +1033,11 @@
 
     mState = TONE_IDLE;
 
-    if (AudioSystem::getOutputSamplingRate(&mSamplingRate, streamType) != NO_ERROR) {
-        ALOGE("Unable to marshal AudioFlinger");
-        return;
-    }
     mThreadCanCallJava = threadCanCallJava;
     mStreamType = streamType;
     mVolume = volume;
     mpToneDesc = NULL;
     mpNewToneDesc = NULL;
-    // Generate tone by chunks of 20 ms to keep cadencing precision
-    mProcessSize = (mSamplingRate * 20) / 1000;
 
     char value[PROPERTY_VALUE_MAX];
     if (property_get("gsm.operator.iso-country", value, "") == 0) {
@@ -1264,13 +1258,10 @@
                     nsec += 1000000000;
                 }
 
-                if ((sec + 1) > ((time_t)(INT_MAX / mSamplingRate))) {
-                    mMaxSmp = sec * mSamplingRate;
-                } else {
-                    // mSamplingRate is always > 1000
-                    sec = sec * 1000 + nsec / 1000000; // duration in milliseconds
-                    mMaxSmp = (unsigned int)(((int64_t)sec * mSamplingRate) / 1000);
-                }
+                const uint64_t msec = static_cast<uint64_t>(sec) * 1000 + nsec / 1'000'000;
+                mMaxSmp = std::min(static_cast<uint64_t>(TONEGEN_INF - 1),
+                        msec * mSamplingRate / 1000);
+
                 ALOGV("stopTone() forcing mMaxSmp to %d, total for far %" PRIu64, mMaxSmp,
                       mTotalSmp);
             } else {
@@ -1324,21 +1315,21 @@
     mpAudioTrack = new AudioTrack(attributionSource);
     ALOGV("AudioTrack(%p) created", mpAudioTrack.get());
 
+
     audio_attributes_t attr;
     audio_stream_type_t streamType = mStreamType;
-    if (mStreamType == AUDIO_STREAM_VOICE_CALL) {
+    if (mStreamType == AUDIO_STREAM_VOICE_CALL || mStreamType == AUDIO_STREAM_BLUETOOTH_SCO) {
         streamType = AUDIO_STREAM_DTMF;
     }
     attr = AudioSystem::streamTypeToAttributes(streamType);
     attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_LOW_LATENCY);
 
-    const size_t frameCount = mProcessSize;
     status_t status = mpAudioTrack->set(
             AUDIO_STREAM_DEFAULT,
             0,    // sampleRate
             AUDIO_FORMAT_PCM_16_BIT,
             AUDIO_CHANNEL_OUT_MONO,
-            frameCount,
+            0,    // frameCount
             AUDIO_OUTPUT_FLAG_NONE,
             wp<AudioTrack::IAudioTrackCallback>::fromExisting(this),
             0,    // notificationFrames
@@ -1358,6 +1349,10 @@
         return false;
     }
 
+    mSamplingRate = mpAudioTrack->getSampleRate();
+    // Generate tone by chunks of 20 ms to keep cadencing precision
+    mProcessSize = (mSamplingRate * 20) / 1000;
+
     mpAudioTrack->setVolume(mVolume);
     mState = TONE_INIT;
     return true;
@@ -1638,14 +1633,11 @@
 
     mpToneDesc = mpNewToneDesc;
 
-    if (mDurationMs == -1) {
+    if (mDurationMs < 0) {  // mDurationMs is signed; treat all negative values as infinite.
         mMaxSmp = TONEGEN_INF;
     } else {
-        if (mDurationMs > (int)(TONEGEN_INF / mSamplingRate)) {
-            mMaxSmp = (mDurationMs / 1000) * mSamplingRate;
-        } else {
-            mMaxSmp = (mDurationMs * mSamplingRate) / 1000;
-        }
+        mMaxSmp = std::min(static_cast<uint64_t>(TONEGEN_INF - 1),
+                static_cast<uint64_t>(mDurationMs) * mSamplingRate / 1000);
         ALOGV("prepareWave, duration limited to %d ms", mDurationMs);
     }
 
@@ -1676,7 +1668,8 @@
     if (mpToneDesc->segments[0].duration == TONEGEN_INF) {
         mNextSegSmp = TONEGEN_INF;
     } else{
-        mNextSegSmp = (mpToneDesc->segments[0].duration * mSamplingRate) / 1000;
+        mNextSegSmp = std::min(static_cast<uint64_t>(TONEGEN_INF - 1),
+                static_cast<uint64_t>(mpToneDesc->segments[0].duration) * mSamplingRate / 1000);
     }
 
     return true;
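The ToneGenerator changes above replace the overflow-prone duration branches with one widened computation clamped below the TONEGEN_INF sentinel. The arithmetic, shown standalone with example values, is:

    // Widen to 64 bits before multiplying, then clamp so the sample count can
    // never reach TONEGEN_INF (which is reserved to mean "infinite").
    const uint64_t durationMs   = 30'000;   // example value
    const uint64_t samplingRate = 48'000;   // example value
    const uint64_t maxSmp = std::min<uint64_t>(TONEGEN_INF - 1,
            durationMs * samplingRate / 1000);
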
diff --git a/media/libaudioclient/TrackPlayerBase.cpp b/media/libaudioclient/TrackPlayerBase.cpp
index 4fc1c44..bc38251 100644
--- a/media/libaudioclient/TrackPlayerBase.cpp
+++ b/media/libaudioclient/TrackPlayerBase.cpp
@@ -38,12 +38,12 @@
                            player_type_t playerType, audio_usage_t usage,
                            audio_session_t sessionId) {
     PlayerBase::init(playerType, usage, sessionId);
-    mAudioTrack = pat;
-    if (mAudioTrack != 0) {
+    mAudioTrack.store(pat);
+    if (pat != 0) {
         mCallbackHandle = callback;
         mSelfAudioDeviceCallback = new SelfAudioDeviceCallback(*this);
-        mAudioTrack->addAudioDeviceCallback(mSelfAudioDeviceCallback);
-        mAudioTrack->setPlayerIId(mPIId); // set in PlayerBase::init().
+        pat->addAudioDeviceCallback(mSelfAudioDeviceCallback);
+        pat->setPlayerIId(mPIId);  // set in PlayerBase::init().
     }
 }
 
@@ -65,12 +65,15 @@
 }
 
 void TrackPlayerBase::doDestroy() {
-    if (mAudioTrack != 0) {
-        mAudioTrack->stop();
-        mAudioTrack->removeAudioDeviceCallback(mSelfAudioDeviceCallback);
+    sp<AudioTrack> audioTrack = getAudioTrack();
+
+    // Note that there may still be another reference in post-unlock phase of SetPlayState
+    clearAudioTrack();
+
+    if (audioTrack != 0) {
+        audioTrack->stop();
+        audioTrack->removeAudioDeviceCallback(mSelfAudioDeviceCallback);
         mSelfAudioDeviceCallback.clear();
-        // Note that there may still be another reference in post-unlock phase of SetPlayState
-        mAudioTrack.clear();
     }
 }
 
@@ -87,16 +90,16 @@
 // Implementation of IPlayer
 status_t TrackPlayerBase::playerStart() {
     status_t status = NO_INIT;
-    if (mAudioTrack != 0) {
-        status = mAudioTrack->start();
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
+        status = audioTrack->start();
     }
     return status;
 }
 
 status_t TrackPlayerBase::playerPause() {
     status_t status = NO_INIT;
-    if (mAudioTrack != 0) {
-        mAudioTrack->pause();
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
+        audioTrack->pause();
         status = NO_ERROR;
     }
     return status;
@@ -105,8 +108,8 @@
 
 status_t TrackPlayerBase::playerStop() {
     status_t status = NO_INIT;
-    if (mAudioTrack != 0) {
-        mAudioTrack->stop();
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
+        audioTrack->stop();
         status = NO_ERROR;
     }
     return status;
@@ -118,10 +121,10 @@
 
 status_t TrackPlayerBase::doSetVolume() {
     status_t status = NO_INIT;
-    if (mAudioTrack != 0) {
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
         float tl = mPlayerVolumeL * mPanMultiplierL * mVolumeMultiplierL;
         float tr = mPlayerVolumeR * mPanMultiplierR * mVolumeMultiplierR;
-        mAudioTrack->setVolume(tl, tr);
+        audioTrack->setVolume(tl, tr);
         status = NO_ERROR;
     }
     return status;
@@ -140,10 +143,9 @@
     if (s != OK) {
         return binderStatusFromStatusT(s);
     }
-
-    if (mAudioTrack != 0) {
+    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != 0) {
         ALOGD("TrackPlayerBase::applyVolumeShaper() from IPlayer");
-        VolumeShaper::Status status = mAudioTrack->applyVolumeShaper(spConfiguration, spOperation);
+        VolumeShaper::Status status = audioTrack->applyVolumeShaper(spConfiguration, spOperation);
         if (status < 0) { // a non-negative value is the volume shaper id.
             ALOGE("TrackPlayerBase::applyVolumeShaper() failed with status %d", status);
         }
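The TrackPlayerBase changes above store the track in a mediautils::atomic_sp and always take a local strong reference before touching it, so a concurrent doDestroy() cannot clear the member mid-call. The pattern in isolation (left/right are illustrative) looks like:

    // getAudioTrack() returns mAudioTrack.load(); the local sp keeps the object
    // alive for the whole call even if another thread clears the member.
    if (sp<AudioTrack> audioTrack = getAudioTrack(); audioTrack != nullptr) {
        audioTrack->setVolume(left, right);   // operate on the snapshot, not the member
    }
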
diff --git a/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl b/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl
deleted file mode 100644
index 2255d4c..0000000
--- a/media/libaudioclient/aidl/android/media/AudioPolicyForcedConfig.aidl
+++ /dev/null
@@ -1,39 +0,0 @@
-/*
- * Copyright (C) 2021 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package android.media;
-
-/**
- * {@hide}
- */
-@Backing(type="int")
-enum AudioPolicyForcedConfig {
-    NONE = 0,
-    SPEAKER = 1,
-    HEADPHONES = 2,
-    BT_SCO = 3,
-    BT_A2DP = 4,
-    WIRED_ACCESSORY = 5,
-    BT_CAR_DOCK = 6,
-    BT_DESK_DOCK = 7,
-    ANALOG_DOCK = 8,
-    DIGITAL_DOCK = 9,
-    NO_BT_A2DP = 10, /* A2DP sink is not preferred to speaker or wired HS */
-    SYSTEM_ENFORCED = 11,
-    HDMI_SYSTEM_AUDIO_ENFORCED = 12,
-    ENCODED_SURROUND_NEVER = 13,
-    ENCODED_SURROUND_ALWAYS = 14,
-    ENCODED_SURROUND_MANUAL = 15,
-}
diff --git a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
index b814b85..4b26d5b 100644
--- a/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
+++ b/media/libaudioclient/aidl/android/media/GetOutputForAttrResponse.aidl
@@ -39,4 +39,6 @@
     boolean isBitPerfect;
     /** The corrected audio attributes. **/
     AudioAttributes attr;
+    /** Initial port volume for the new audio track. */
+    float volume;
 }
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 31d3af5..1c825bc 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -41,6 +41,7 @@
 import android.media.ISoundDoseCallback;
 import android.media.MicrophoneInfoFw;
 import android.media.RenderPosition;
+import android.media.TrackInternalMuteInfo;
 import android.media.TrackSecondaryOutputInfo;
 import android.media.audio.common.AudioChannelLayout;
 import android.media.audio.common.AudioFormatDescription;
@@ -93,13 +94,18 @@
     float getMasterBalance();
 
     /*
-     * Set/gets stream type state. This will probably be used by
+     * Set stream type state. This will probably be used by
      * the preference panel, mostly.
      */
     void setStreamVolume(AudioStreamType stream, float value, int /* audio_io_handle_t */ output);
     void setStreamMute(AudioStreamType stream, boolean muted);
-    float streamVolume(AudioStreamType stream, int /* audio_io_handle_t */ output);
-    boolean streamMute(AudioStreamType stream);
+
+    /*
+     * Set the volume for the given AudioTrack port ids. This is the new way of controlling volume
+     * from AudioPolicyManager to AudioFlinger.
+     */
+    void setPortsVolume(in int[] /* audio_port_handle_t[] */ portIds, float volume,
+            int /* audio_io_handle_t */ output);
 
     // set audio mode.
     void setMode(AudioMode mode);
@@ -293,6 +299,17 @@
      */
     AudioPortFw getAudioMixPort(in AudioPortFw devicePort, in AudioPortFw mixPort);
 
+    /**
+     * Set internal mute for a list of tracks.
+     */
+    void setTracksInternalMute(in TrackInternalMuteInfo[] tracksInternalMute);
+
+    /*
+     * Reset circular references in the AudioFlinger service.
+     * Test API.
+     */
+    void resetReferencesForTest();
+
     // When adding a new method, please review and update
     // IAudioFlinger.h AudioFlingerServerAdapter::Delegate::TransactionCode
     // AudioFlinger.cpp AudioFlinger::onTransactWrapper()
diff --git a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
index 633493c..b4f879a 100644
--- a/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioPolicyService.aidl
@@ -25,8 +25,6 @@
 import android.media.AudioOffloadMode;
 import android.media.AudioPatchFw;
 import android.media.AudioPolicyDeviceState;
-import android.media.AudioPolicyForcedConfig;
-import android.media.AudioPolicyForceUse;
 import android.media.AudioPortFw;
 import android.media.AudioPortConfigFw;
 import android.media.AudioPortRole;
@@ -49,6 +47,8 @@
 import android.media.audio.common.AudioDeviceDescription;
 import android.media.audio.common.AudioFormatDescription;
 import android.media.audio.common.AudioMode;
+import android.media.audio.common.AudioPolicyForcedConfig;
+import android.media.audio.common.AudioPolicyForceUse;
 import android.media.audio.common.AudioProfile;
 import android.media.audio.common.AudioOffloadInfo;
 import android.media.audio.common.AudioPort;
@@ -58,6 +58,8 @@
 import android.media.audio.common.AudioUuid;
 import android.media.audio.common.Int;
 
+import com.android.media.permission.INativePermissionController;
+
 /**
  * IAudioPolicyService interface (see AudioPolicyInterface for method descriptions).
  *
@@ -114,6 +116,10 @@
 
     void releaseInput(int /* audio_port_handle_t */ portId);
 
+    void setDeviceAbsoluteVolumeEnabled(in AudioDevice device,
+                                        boolean enabled,
+                                        AudioStreamType streamToDriveAbs);
+
     void initStreamVolume(AudioStreamType stream,
                           int indexMin,
                           int indexMax);
@@ -471,6 +477,11 @@
                                        int /* uid_t */ uid);
 
 
+    /**
+     * Get the native permission controller for audioserver, to push package and permission info
+     * required to control audio access.
+     */
+    INativePermissionController getPermissionController();
     // When adding a new method, please review and update
     // AudioPolicyService.cpp AudioPolicyService::onTransact()
     // AudioPolicyService.cpp IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
index ddda8bb..73610a8 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -19,6 +19,7 @@
 import android.media.AudioPortFw;
 import android.media.audio.common.AudioConfig;
 import android.media.audio.common.AudioConfigBase;
+import android.media.audio.common.AudioAttributes;
 
 /**
  * {@hide}
@@ -32,4 +33,5 @@
     AudioPortFw device;
     /** Bitmask, indexed by AudioOutputFlag. */
     int flags;
+    AudioAttributes attributes;
 }
diff --git a/media/libmedia/include/media/CodecServiceRegistrant.h b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
similarity index 72%
copy from media/libmedia/include/media/CodecServiceRegistrant.h
copy to media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
index e0af781..05b1fa4 100644
--- a/media/libmedia/include/media/CodecServiceRegistrant.h
+++ b/media/libaudioclient/aidl/android/media/TrackInternalMuteInfo.aidl
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright (C) 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,10 +14,10 @@
  * limitations under the License.
  */
 
-#ifndef CODEC_SERVICE_REGISTRANT_H_
+package android.media;
 
-#define CODEC_SERVICE_REGISTRANT_H_
-
-typedef void (*RegisterCodecServicesFunc)();
-
-#endif  // CODEC_SERVICE_REGISTRANT_H_
+parcelable TrackInternalMuteInfo {
+    /* Interpreted as audio_port_handle_t. */
+    int portId;
+    boolean muted;
+}
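The new parcelable pairs a track's port id with a mute flag. A hedged sketch of building the list and passing it to the matching IAudioFlinger method (af and trackPortId are assumed to exist) is:

    // trackPortId is illustrative; real ids come from the tracks being muted.
    media::TrackInternalMuteInfo info;
    info.portId = static_cast<int32_t>(trackPortId);   // interpreted as audio_port_handle_t
    info.muted = true;
    status_t res = af->setTracksInternalMute({info});
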
diff --git a/media/libaudioclient/aidl/fuzzer/Android.bp b/media/libaudioclient/aidl/fuzzer/Android.bp
index 6cfccd6..61d5ccd 100644
--- a/media/libaudioclient/aidl/fuzzer/Android.bp
+++ b/media/libaudioclient/aidl/fuzzer/Android.bp
@@ -22,63 +22,31 @@
     name: "libaudioclient_aidl_fuzzer_defaults",
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
+        "audiopermissioncontroller",
         "libaudiomockhal",
         "libcgrouprc",
         "libcgrouprc_format",
         "libfakeservicemanager",
         "libjsoncpp",
-        "liblog",
-        "libmedia_helper",
         "libmediametricsservice",
         "libprocessgroup",
         "shared-file-region-aidl-cpp",
     ],
     shared_libs: [
         "android.hardware.audio.common-util",
-        "audioclient-types-aidl-cpp",
-        "audioflinger-aidl-cpp",
-        "audiopolicy-aidl-cpp",
-        "audiopolicy-types-aidl-cpp",
-        "av-types-aidl-cpp",
-        "capture_state_listener-aidl-cpp",
-        "effect-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libactivitymanager_aidl",
-        "libaudioclient",
-        "libaudioclient_aidl_conversion",
         "libaudioflinger",
-        "libaudiofoundation",
-        "libaudiohal",
-        "libaudiomanager",
-        "libaudiopolicy",
-        "libaudiopolicymanagerdefault",
         "libaudiopolicyservice",
-        "libaudioprocessing",
-        "libaudioutils",
         "libdl",
-        "libheadtracking",
-        "libmediametrics",
-        "libmediautils",
-        "libnbaio",
-        "libnblog",
-        "libpowermanager",
-        "libvibrator",
         "libvndksupport",
-        "libxml2",
         "mediametricsservice-aidl-cpp",
-        "packagemanager_aidl-cpp",
     ],
     header_libs: [
-        "libaudioflinger_headers",
-        "libaudiofoundation_headers",
-        "libaudiohal_headers",
         "libaudiopolicymanager_interface_headers",
-        "libbinder_headers",
         "libmedia_headers",
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-audio-fuzzing-reports@google.com",
         ],
         componentid: 155276,
         hotlists: ["4593311"],
@@ -98,6 +66,8 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
         "libaudioclient_aidl_fuzzer_defaults",
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
         "service_fuzzer_defaults",
     ],
 }
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..c1e1de5
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..8e49acd
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..a8ffcae
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3 b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..7c25f6e
--- /dev/null
+++ b/media/libaudioclient/aidl/fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index dfdb4cf..710a656 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -26,6 +26,7 @@
 #include <android/content/AttributionSourceState.h>
 #include <binder/IServiceManager.h>
 #include <binder/MemoryDealer.h>
+#include <com_android_media_audioserver.h>
 #include <media/AidlConversion.h>
 #include <media/AudioEffect.h>
 #include <media/AudioRecord.h>
@@ -41,6 +42,8 @@
 constexpr int32_t kMaxSampleRateHz = 192000;
 constexpr int32_t kSampleRateUnspecified = 0;
 
+namespace audioserver_flags = com::android::media::audioserver;
+
 using namespace std;
 using namespace android;
 
@@ -501,13 +504,19 @@
     AudioSystem::getMasterMute(&state);
     AudioSystem::isMicrophoneMuted(&state);
 
-    audio_stream_type_t stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::setStreamMute(getValue(&mFdp, kStreamtypes), mFdp.ConsumeBool());
+    audio_stream_type_t stream;
+    if (!audioserver_flags::portid_volume_management()) {
+        stream = getValue(&mFdp, kStreamtypes);
+        AudioSystem::setStreamMute(getValue(&mFdp, kStreamtypes), mFdp.ConsumeBool());
 
-    stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::setStreamVolume(stream, mFdp.ConsumeFloatingPoint<float>(),
-                                 mFdp.ConsumeIntegral<int32_t>());
-
+        stream = getValue(&mFdp, kStreamtypes);
+        AudioSystem::setStreamVolume(stream, mFdp.ConsumeFloatingPoint<float>(),
+                                     mFdp.ConsumeIntegral<int32_t>());
+    } else {
+        std::vector<audio_port_handle_t> portsForVolumeChange{};
+        AudioSystem::setPortsVolume(portsForVolumeChange, mFdp.ConsumeFloatingPoint<float>(),
+                                    mFdp.ConsumeIntegral<int32_t>());
+    }
     audio_mode_t mode = getValue(&mFdp, kModes);
     AudioSystem::setMode(mode);
 
@@ -519,12 +528,6 @@
     stream = getValue(&mFdp, kStreamtypes);
     AudioSystem::getOutputLatency(&latency, stream);
 
-    stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::getStreamVolume(stream, &volume, mFdp.ConsumeIntegral<int32_t>());
-
-    stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::getStreamMute(stream, &state);
-
     uint32_t samplingRate;
     AudioSystem::getSamplingRate(mFdp.ConsumeIntegral<int32_t>(), &samplingRate);
 
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index d4479ef..25d91d3 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -138,6 +138,12 @@
                                       audio_format_t format,
                                       audio_channel_mask_t channelMask);
 
+    /* Checks for an erroneous status, marks the error in MediaMetrics, and logs the message.
+     * Updates and returns mStatus.
+     */
+    status_t logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage,
+                                       const std::string& func);
+
     /* How data is transferred from AudioRecord
      */
     enum transfer_type {
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 5c9a7c6..40e5673 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -124,15 +124,22 @@
     static status_t setMasterMute(bool mute);
     static status_t getMasterMute(bool* mute);
 
-    // set/get stream volume on specified output
+    // set stream volume on specified output
     static status_t setStreamVolume(audio_stream_type_t stream, float value,
                                     audio_io_handle_t output);
-    static status_t getStreamVolume(audio_stream_type_t stream, float* volume,
-                                    audio_io_handle_t output);
 
     // mute/unmute stream
     static status_t setStreamMute(audio_stream_type_t stream, bool mute);
-    static status_t getStreamMute(audio_stream_type_t stream, bool* mute);
+
+    /**
+     * Set volume for given AudioTrack port ids on specified output
+     * @param portIds to consider
+     * @param volume to set
+     * @param output to consider
+     * @return NO_ERROR if successful
+     */
+    static status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds,
+                                   float volume, audio_io_handle_t output);
 
     // set audio mode in audio hardware
     static status_t setMode(audio_mode_t mode);
@@ -337,7 +344,8 @@
                                      audio_port_handle_t *portId,
                                      std::vector<audio_io_handle_t> *secondaryOutputs,
                                      bool *isSpatialized,
-                                     bool *isBitPerfect);
+                                     bool *isBitPerfect,
+                                     float *volume);
     static status_t startOutput(audio_port_handle_t portId);
     static status_t stopOutput(audio_port_handle_t portId);
     static void releaseOutput(audio_port_handle_t portId);
@@ -374,9 +382,13 @@
     static status_t startInput(audio_port_handle_t portId);
     static status_t stopInput(audio_port_handle_t portId);
     static void releaseInput(audio_port_handle_t portId);
+    static status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                   const char *address,
+                                                   bool enabled,
+                                                   audio_stream_type_t streamToDriveAbs);
     static status_t initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax);
+                                     int indexMin,
+                                     int indexMax);
     static status_t setStreamVolumeIndex(audio_stream_type_t stream,
                                          int index,
                                          audio_devices_t device);
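The new AudioSystem::setDeviceAbsoluteVolumeEnabled() declaration above takes a device type, its address, an enable flag, and the stream that drives absolute volume. A hedged call sketch with illustrative values is:

    // All values below are illustrative only.
    status_t res = AudioSystem::setDeviceAbsoluteVolumeEnabled(
            AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,    // deviceType
            "00:11:22:33:44:55",                // address
            /*enabled=*/ true,
            AUDIO_STREAM_MUSIC);                // streamToDriveAbs
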
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index 3a001a4..de97863 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -233,8 +233,7 @@
      * FIXME This API assumes a route, and so should be deprecated.
      */
 
-    static status_t getMinFrameCount(size_t* frameCount,
-                                     audio_stream_type_t streamType,
+    static status_t getMinFrameCount(size_t* frameCount, audio_stream_type_t streamType,
                                      uint32_t sampleRate);
 
     /* Check if direct playback is possible for the given audio configuration and attributes.
@@ -243,6 +242,11 @@
     static bool isDirectOutputSupported(const audio_config_base_t& config,
                                         const audio_attributes_t& attributes);
 
+    /* Checks for an erroneous status and logs the error message.
+     * Updates and returns mStatus.
+     */
+    status_t logIfErrorAndReturnStatus(status_t status, const std::string& errorMessage);
+
     /* How data is transferred to AudioTrack
      */
     enum transfer_type {
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 5a1e037..a5f3217 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -60,6 +60,7 @@
 #include "android/media/OpenInputResponse.h"
 #include "android/media/OpenOutputRequest.h"
 #include "android/media/OpenOutputResponse.h"
+#include "android/media/TrackInternalMuteInfo.h"
 #include "android/media/TrackSecondaryOutputInfo.h"
 
 namespace android {
@@ -228,9 +229,15 @@
                                     audio_io_handle_t output) = 0;
     virtual     status_t    setStreamMute(audio_stream_type_t stream, bool muted) = 0;
 
-    virtual     float       streamVolume(audio_stream_type_t stream,
-                                    audio_io_handle_t output) const = 0;
-    virtual     bool        streamMute(audio_stream_type_t stream) const = 0;
+    /**
+     * Set volume for given AudioTrack port ids on specified output
+     * @param portIds to consider
+     * @param volume to set
+     * @param output to consider
+     * @return NO_ERROR if successful
+     */
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+            audio_io_handle_t output) = 0;
 
     // set audio mode
     virtual     status_t    setMode(audio_mode_t mode) = 0;
@@ -388,6 +395,11 @@
 
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) const = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
+
+    virtual status_t resetReferencesForTest() = 0;
 };
 
 /**
@@ -418,9 +430,8 @@
     status_t setStreamVolume(audio_stream_type_t stream, float value,
                              audio_io_handle_t output) override;
     status_t setStreamMute(audio_stream_type_t stream, bool muted) override;
-    float streamVolume(audio_stream_type_t stream,
-                       audio_io_handle_t output) const override;
-    bool streamMute(audio_stream_type_t stream) const override;
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+            audio_io_handle_t output) override;
     status_t setMode(audio_mode_t mode) override;
     status_t setMicMute(bool state) override;
     bool getMicMute() const override;
@@ -504,6 +515,9 @@
     status_t getAudioPolicyConfig(media::AudioPolicyConfig* output) override;
     status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                              struct audio_port_v7 *mixPort) const override;
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+    status_t resetReferencesForTest() override;
 
 private:
     const sp<media::IAudioFlingerService> mDelegate;
@@ -540,8 +554,7 @@
             MASTER_MUTE = media::BnAudioFlingerService::TRANSACTION_masterMute,
             SET_STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_setStreamVolume,
             SET_STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_setStreamMute,
-            STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_streamVolume,
-            STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_streamMute,
+            SET_PORTS_VOLUME = media::BnAudioFlingerService::TRANSACTION_setPortsVolume,
             SET_MODE = media::BnAudioFlingerService::TRANSACTION_setMode,
             SET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_setMicMute,
             GET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_getMicMute,
@@ -606,6 +619,9 @@
             GET_AUDIO_POLICY_CONFIG =
                     media::BnAudioFlingerService::TRANSACTION_getAudioPolicyConfig,
             GET_AUDIO_MIX_PORT = media::BnAudioFlingerService::TRANSACTION_getAudioMixPort,
+            SET_TRACKS_INTERNAL_MUTE = media::BnAudioFlingerService::TRANSACTION_setTracksInternalMute,
+            RESET_REFERENCES_FOR_TEST =
+                    media::BnAudioFlingerService::TRANSACTION_resetReferencesForTest,
         };
 
     protected:
@@ -661,9 +677,8 @@
     Status setStreamVolume(media::audio::common::AudioStreamType stream,
                            float value, int32_t output) override;
     Status setStreamMute(media::audio::common::AudioStreamType stream, bool muted) override;
-    Status streamVolume(media::audio::common::AudioStreamType stream,
-                        int32_t output, float* _aidl_return) override;
-    Status streamMute(media::audio::common::AudioStreamType stream, bool* _aidl_return) override;
+    Status setPortsVolume(const std::vector<int32_t>& portIds, float volume, int32_t output)
+            override;
     Status setMode(media::audio::common::AudioMode mode) override;
     Status setMicMute(bool state) override;
     Status getMicMute(bool* _aidl_return) override;
@@ -742,6 +757,9 @@
     Status getAudioMixPort(const media::AudioPortFw& devicePort,
                            const media::AudioPortFw& mixPort,
                            media::AudioPortFw* _aidl_return) override;
+    Status setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+    Status resetReferencesForTest() override;
 private:
     const sp<AudioFlingerServerAdapter::Delegate> mDelegate;
 };
diff --git a/media/libaudioclient/include/media/PolicyAidlConversion.h b/media/libaudioclient/include/media/PolicyAidlConversion.h
index ed9ddd6..1b90d6b 100644
--- a/media/libaudioclient/include/media/PolicyAidlConversion.h
+++ b/media/libaudioclient/include/media/PolicyAidlConversion.h
@@ -28,8 +28,6 @@
 #include <android/media/AudioMixRouteFlag.h>
 #include <android/media/AudioMixType.h>
 #include <android/media/AudioOffloadMode.h>
-#include <android/media/AudioPolicyForceUse.h>
-#include <android/media/AudioPolicyForcedConfig.h>
 #include <android/media/DeviceRole.h>
 
 #include <media/AidlConversionUtil.h>
@@ -84,16 +82,6 @@
 ConversionResult<media::AudioPolicyDeviceState>
 legacy2aidl_audio_policy_dev_state_t_AudioPolicyDeviceState(audio_policy_dev_state_t legacy);
 
-ConversionResult<audio_policy_force_use_t>
-aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(media::AudioPolicyForceUse aidl);
-ConversionResult<media::AudioPolicyForceUse>
-legacy2aidl_audio_policy_force_use_t_AudioPolicyForceUse(audio_policy_force_use_t legacy);
-
-ConversionResult<audio_policy_forced_cfg_t>
-aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(media::AudioPolicyForcedConfig aidl);
-ConversionResult<media::AudioPolicyForcedConfig>
-legacy2aidl_audio_policy_forced_cfg_t_AudioPolicyForcedConfig(audio_policy_forced_cfg_t legacy);
-
 ConversionResult<device_role_t>
 aidl2legacy_DeviceRole_device_role_t(media::DeviceRole aidl);
 ConversionResult<media::DeviceRole>
diff --git a/media/libaudioclient/include/media/TrackPlayerBase.h b/media/libaudioclient/include/media/TrackPlayerBase.h
index fe88116..8df9ff8 100644
--- a/media/libaudioclient/include/media/TrackPlayerBase.h
+++ b/media/libaudioclient/include/media/TrackPlayerBase.h
@@ -19,6 +19,7 @@
 
 #include <media/AudioTrack.h>
 #include <media/PlayerBase.h>
+#include <mediautils/Synchronization.h>
 
 namespace android {
 
@@ -37,10 +38,11 @@
             const media::VolumeShaperConfiguration& configuration,
             const media::VolumeShaperOperation& operation);
 
-    //FIXME move to protected field, so far made public to minimize changes to AudioTrack logic
-    sp<AudioTrack> mAudioTrack;
+    sp<AudioTrack> getAudioTrack() { return mAudioTrack.load(); }
 
-            void setPlayerVolume(float vl, float vr);
+    void clearAudioTrack() { mAudioTrack.store(nullptr); }
+
+    void setPlayerVolume(float vl, float vr);
 
 protected:
 
@@ -68,6 +70,7 @@
     float mPlayerVolumeL, mPlayerVolumeR;
     sp<AudioTrack::IAudioTrackCallback> mCallbackHandle;
     sp<SelfAudioDeviceCallback> mSelfAudioDeviceCallback;
+    mediautils::atomic_sp<AudioTrack> mAudioTrack;
 };
 
 } // namespace android
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 055da5b..ddf14a3 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -122,6 +122,7 @@
     ],
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
         "audiopolicy-aidl-cpp",
@@ -133,6 +134,9 @@
         "libaudiomanager",
         "libaudiopolicy",
     ],
+    cflags: [
+        "-Wthread-safety",
+    ],
     data: ["bbb*.raw"],
     srcs: [
         "audio_test_utils.cpp",
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 0be1d7e..7f55e48 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -483,28 +483,8 @@
                                  AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
                                          std::vector<int32_t>{1, 2}))));
 
-TEST(AnonymizedBluetoothAddressRoundTripTest, Legacy2Aidl2Legacy) {
-    const std::vector<uint8_t> sAnonymizedAidlAddress =
-            std::vector<uint8_t>{0xFD, 0xFF, 0xFF, 0xFF, 0xAB, 0xCD};
-    const std::string sAnonymizedLegacyAddress = std::string("XX:XX:XX:XX:AB:CD");
-    auto device = legacy2aidl_audio_device_AudioDevice(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
-                                                       sAnonymizedLegacyAddress);
-    ASSERT_TRUE(device.ok());
-    ASSERT_EQ(AudioDeviceAddress::Tag::mac, device.value().address.getTag());
-    ASSERT_EQ(sAnonymizedAidlAddress, device.value().address.get<AudioDeviceAddress::mac>());
-
-    audio_devices_t legacyType;
-    std::string legacyAddress;
-    status_t status =
-            aidl2legacy_AudioDevice_audio_device(device.value(), &legacyType, &legacyAddress);
-    ASSERT_EQ(OK, status);
-    EXPECT_EQ(legacyType, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP);
-    EXPECT_EQ(sAnonymizedLegacyAddress, legacyAddress);
-}
-
 class AudioFormatDescriptionRoundTripTest : public testing::TestWithParam<AudioFormatDescription> {
 };
-
 TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
     const auto initial = GetParam();
     auto conv = aidl2legacy_AudioFormatDescription_audio_format_t(initial);
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index 9a202cc3..1599839 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -28,25 +28,35 @@
 
 void OnAudioDeviceUpdateNotifier::onAudioDeviceUpdate(audio_io_handle_t audioIo,
                                                       audio_port_handle_t deviceId) {
-    std::unique_lock<std::mutex> lock{mMutex};
     ALOGI("%s: audioIo=%d deviceId=%d", __func__, audioIo, deviceId);
-    mAudioIo = audioIo;
-    mDeviceId = deviceId;
+    {
+        std::lock_guard lock(mMutex);
+        mAudioIo = audioIo;
+        mDeviceId = deviceId;
+    }
     mCondition.notify_all();
 }
 
 status_t OnAudioDeviceUpdateNotifier::waitForAudioDeviceCb(audio_port_handle_t expDeviceId) {
-    std::unique_lock<std::mutex> lock{mMutex};
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
     if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
         (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
         mCondition.wait_for(lock, std::chrono::milliseconds(500));
         if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
-            (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId))
+            (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
             return TIMED_OUT;
+        }
     }
     return OK;
 }
 
+std::pair<audio_io_handle_t, audio_port_handle_t>
+OnAudioDeviceUpdateNotifier::getLastPortAndDevice() const {
+    std::lock_guard lock(mMutex);
+    return {mAudioIo, mDeviceId};
+}
+
 AudioPlayback::AudioPlayback(uint32_t sampleRate, audio_format_t format,
                              audio_channel_mask_t channelMask, audio_output_flags_t flags,
                              audio_session_t sessionId, AudioTrack::transfer_type transferType,
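The test-utils changes above narrow each critical section and signal the condition variable after releasing the lock, while waiters re-check their predicate under the lock. A compact, generic sketch of the notify-outside-the-lock half of that pattern is:

    #include <condition_variable>
    #include <mutex>

    // Names are placeholders; this only illustrates the locking discipline.
    struct Notifier {
        std::mutex mMutex;
        std::condition_variable mCondition;
        int mValue = 0;

        void publish(int value) {
            {
                std::lock_guard<std::mutex> lock(mMutex);
                mValue = value;           // mutate shared state under the lock
            }
            mCondition.notify_all();      // wake waiters without holding the lock
        }
    };
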
@@ -147,9 +157,8 @@
 }
 
 void AudioPlayback::onBufferEnd() {
-    std::unique_lock<std::mutex> lock{mMutex};
+    std::lock_guard lock(mMutex);
     mStopPlaying = true;
-    mCondition.notify_all();
 }
 
 status_t AudioPlayback::fillBuffer() {
@@ -187,7 +196,12 @@
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
     size_t totalFrameCount = mMemCapacity / mTrack->frameSize();
-    while (!mStopPlaying && counter < maxTries) {
+    bool stopPlaying;
+    {
+        std::lock_guard lock(mMutex);
+        stopPlaying = mStopPlaying;
+    }
+    while (!stopPlaying && counter < maxTries) {
         uint32_t currPosition;
         mTrack->getPosition(&currPosition);
         if (currPosition >= totalFrameCount) counter++;
@@ -213,7 +227,10 @@
             mTrack->start();
         }
         std::this_thread::sleep_for(std::chrono::milliseconds(WAIT_PERIOD_MS));
+        std::lock_guard lock(mMutex);
+        stopPlaying = mStopPlaying;
     }
+    std::lock_guard lock(mMutex);
     if (!mStopPlaying && counter == maxTries) return TIMED_OUT;
     return OK;
 }
@@ -228,8 +245,10 @@
 }
 
 void AudioPlayback::stop() {
-    std::unique_lock<std::mutex> lock{mMutex};
-    mStopPlaying = true;
+    {
+        std::lock_guard lock(mMutex);
+        mStopPlaying = true;
+    }
     if (mState != PLAY_STOPPED && mState != PLAY_NO_INIT) {
         int32_t msec = 0;
         (void)mTrack->pendingDuration(&msec);
@@ -257,10 +276,13 @@
         return 0;
     }
 
-    // no more frames to read
-    if (mNumFramesReceived >= mNumFramesToRecord || mStopRecording) {
-        mStopRecording = true;
-        return 0;
+    {
+        std::lock_guard l(mMutex);
+        // no more frames to read
+        if (mNumFramesReceived >= mNumFramesToRecord || mStopRecording) {
+            mStopRecording = true;
+            return 0;
+        }
     }
 
     int64_t timeUs = 0, position = 0, timeNs = 0;
@@ -272,6 +294,7 @@
         ts.getBestTimestamp(&position, &timeNs, ExtendedTimestamp::TIMEBASE_MONOTONIC, &location) ==
                 OK) {
         // Use audio timestamp.
+        std::lock_guard l(mMutex);
         timeUs = timeNs / 1000 -
                  (position - mNumFramesReceived + mNumFramesLost) * usPerSec / mSampleRate;
     } else {
@@ -300,6 +323,7 @@
         } else {
             numLostBytes = 0;
         }
+        std::lock_guard l(mMutex);
         const int64_t timestampUs =
                 ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
                 mRecord->getSampleRate();
@@ -313,6 +337,7 @@
     if (buffer.size() == 0) {
         ALOGW("Nothing is available from AudioRecord callback buffer");
     } else {
+        std::lock_guard l(mMutex);
         const size_t bufferSize = buffer.size();
         const int64_t timestampUs =
                 ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
@@ -324,9 +349,12 @@
     }
 
     if (tmpQueue.size() > 0) {
-        std::unique_lock<std::mutex> lock{mMutex};
-        for (auto it = tmpQueue.begin(); it != tmpQueue.end(); it++)
-            mBuffersReceived.push_back(std::move(*it));
+        {
+            std::lock_guard lock(mMutex);
+            mBuffersReceived.insert(mBuffersReceived.end(),
+                                    std::make_move_iterator(tmpQueue.begin()),
+                                    std::make_move_iterator(tmpQueue.end()));
+        }
         mCondition.notify_all();
     }
     return buffer.size();
@@ -334,17 +362,24 @@
 
 void AudioCapture::onOverrun() {
     ALOGV("received event overrun");
-    mBufferOverrun = true;
 }
 
 void AudioCapture::onMarker(uint32_t markerPosition) {
     ALOGV("received Callback at position %d", markerPosition);
-    mReceivedCbMarkerAtPosition = markerPosition;
+    {
+        std::lock_guard l(mMutex);
+        mReceivedCbMarkerAtPosition = markerPosition;
+    }
+    mMarkerCondition.notify_all();
 }
 
 void AudioCapture::onNewPos(uint32_t markerPosition) {
     ALOGV("received Callback at position %d", markerPosition);
-    mReceivedCbMarkerCount++;
+    {
+        std::lock_guard l(mMutex);
+        mReceivedCbMarkerCount = mReceivedCbMarkerCount.value_or(0) + 1;
+    }
+    mMarkerCondition.notify_all();
 }
 
 void AudioCapture::onNewIAudioRecord() {
@@ -362,20 +397,7 @@
       mFlags(flags),
       mSessionId(sessionId),
       mTransferType(transferType),
-      mAttributes(attributes) {
-    mFrameCount = 0;
-    mNotificationFrames = 0;
-    mNumFramesToRecord = 0;
-    mNumFramesReceived = 0;
-    mNumFramesLost = 0;
-    mBufferOverrun = false;
-    mMarkerPosition = 0;
-    mMarkerPeriod = 0;
-    mReceivedCbMarkerAtPosition = -1;
-    mReceivedCbMarkerCount = 0;
-    mState = REC_NO_INIT;
-    mStopRecording = false;
-}
+      mAttributes(attributes) {}
 
 AudioCapture::~AudioCapture() {
     if (mOutFileFd > 0) close(mOutFileFd);
@@ -484,7 +506,10 @@
 
 status_t AudioCapture::stop() {
     status_t status = OK;
-    mStopRecording = true;
+    {
+        std::lock_guard l(mMutex);
+        mStopRecording = true;
+    }
     if (mState != REC_STOPPED && mState != REC_NO_INIT) {
         if (mInputSource != AUDIO_SOURCE_DEFAULT) {
             bool state = false;
@@ -503,25 +528,32 @@
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
     size_t nonContig = 0;
-    while (mNumFramesReceived < mNumFramesToRecord) {
+    int64_t numFramesReceived;
+    {
+        std::lock_guard l(mMutex);
+        numFramesReceived = mNumFramesReceived;
+    }
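+    // Work on a local copy so mMutex is not held while blocking in obtainBuffer() below.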
+    while (numFramesReceived < mNumFramesToRecord) {
         AudioRecord::Buffer recordBuffer;
         recordBuffer.frameCount = mNotificationFrames;
         status_t status = mRecord->obtainBuffer(&recordBuffer, 1, &nonContig);
         if (OK == status) {
             const int64_t timestampUs =
-                    ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
+                    ((1000000LL * numFramesReceived) + (mRecord->getSampleRate() >> 1)) /
                     mRecord->getSampleRate();
             RawBuffer buff{-1, timestampUs, static_cast<int32_t>(recordBuffer.size())};
             memcpy(buff.mData.get(), recordBuffer.data(), recordBuffer.size());
             buffer = std::move(buff);
-            mNumFramesReceived += recordBuffer.size() / mRecord->frameSize();
+            numFramesReceived += recordBuffer.size() / mRecord->frameSize();
             mRecord->releaseBuffer(&recordBuffer);
             counter = 0;
         } else if (WOULD_BLOCK == status) {
             // if not received a buffer for MAX_WAIT_TIME_MS, something has gone wrong
-            if (counter == maxTries) return TIMED_OUT;
-            counter++;
+            if (counter++ == maxTries) status = TIMED_OUT;
         }
+        std::lock_guard l(mMutex);
+        mNumFramesReceived = numFramesReceived;
+        if (TIMED_OUT == status) return status;
     }
     return OK;
 }
@@ -530,7 +562,8 @@
     if (REC_STARTED != mState) return INVALID_OPERATION;
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
-    std::unique_lock<std::mutex> lock{mMutex};
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
     while (mBuffersReceived.empty() && !mStopRecording && counter < maxTries) {
         mCondition.wait_for(lock, std::chrono::milliseconds(WAIT_PERIOD_MS));
         counter++;
@@ -548,7 +581,12 @@
 status_t AudioCapture::audioProcess() {
     RawBuffer buffer;
     status_t status = OK;
-    while (mNumFramesReceived < mNumFramesToRecord && status == OK) {
+    int64_t numFramesReceived;
+    {
+        std::lock_guard l(mMutex);
+        numFramesReceived = mNumFramesReceived;
+    }
+    while (numFramesReceived < mNumFramesToRecord && status == OK) {
         if (mTransferType == AudioRecord::TRANSFER_CALLBACK)
             status = obtainBufferCb(buffer);
         else
@@ -557,10 +595,52 @@
             const char* ptr = static_cast<const char*>(static_cast<void*>(buffer.mData.get()));
             write(mOutFileFd, ptr, buffer.mCapacity);
         }
+        std::lock_guard l(mMutex);
+        numFramesReceived = mNumFramesReceived;
     }
     return OK;
 }
 
+uint32_t AudioCapture::getMarkerPeriod() const {
+    std::lock_guard l(mMutex);
+    return mMarkerPeriod;
+}
+
+uint32_t AudioCapture::getMarkerPosition() const {
+    std::lock_guard l(mMutex);
+    return mMarkerPosition;
+}
+
+void AudioCapture::setMarkerPeriod(uint32_t markerPeriod) {
+    std::lock_guard l(mMutex);
+    mMarkerPeriod = markerPeriod;
+}
+
+void AudioCapture::setMarkerPosition(uint32_t markerPosition) {
+    std::lock_guard l(mMutex);
+    mMarkerPosition = markerPosition;
+}
+
+uint32_t AudioCapture::waitAndGetReceivedCbMarkerAtPosition() const {
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
+        android::base::ScopedLockAssertion lock_assertion(mMutex);
+        return mReceivedCbMarkerAtPosition.has_value();
+    });
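+    // If the wait timed out without a marker callback, ~0 (UINT32_MAX) is returned as a sentinel.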
+    return mReceivedCbMarkerAtPosition.value_or(~0);
+}
+
+uint32_t AudioCapture::waitAndGetReceivedCbMarkerCount() const {
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
+        android::base::ScopedLockAssertion lock_assertion(mMutex);
+        return mReceivedCbMarkerCount.has_value();
+    });
+    return mReceivedCbMarkerCount.value_or(0);
+}
+
 status_t listAudioPorts(std::vector<audio_port_v7>& portsVec) {
     int attempts = 5;
     status_t status;
diff --git a/media/libaudioclient/tests/audio_test_utils.h b/media/libaudioclient/tests/audio_test_utils.h
index 76e4642..022ecf3 100644
--- a/media/libaudioclient/tests/audio_test_utils.h
+++ b/media/libaudioclient/tests/audio_test_utils.h
@@ -19,14 +19,13 @@
 
 #include <sys/stat.h>
 #include <unistd.h>
-#include <atomic>
-#include <chrono>
-#include <cinttypes>
 #include <deque>
 #include <memory>
 #include <mutex>
 #include <thread>
+#include <utility>
 
+#include <android-base/thread_annotations.h>
 #include <binder/MemoryDealer.h>
 #include <media/AidlConversion.h>
 #include <media/AudioRecord.h>
@@ -63,13 +62,15 @@
 
 class OnAudioDeviceUpdateNotifier : public AudioSystem::AudioDeviceCallback {
   public:
-    audio_io_handle_t mAudioIo = AUDIO_IO_HANDLE_NONE;
-    audio_port_handle_t mDeviceId = AUDIO_PORT_HANDLE_NONE;
-    std::mutex mMutex;
-    std::condition_variable mCondition;
-
-    void onAudioDeviceUpdate(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
+    void onAudioDeviceUpdate(audio_io_handle_t audioIo, audio_port_handle_t deviceId) override;
     status_t waitForAudioDeviceCb(audio_port_handle_t expDeviceId = AUDIO_PORT_HANDLE_NONE);
+    std::pair<audio_io_handle_t, audio_port_handle_t> getLastPortAndDevice() const;
+
+  private:
+    audio_io_handle_t mAudioIo GUARDED_BY(mMutex) = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t mDeviceId GUARDED_BY(mMutex) = AUDIO_PORT_HANDLE_NONE;
+    mutable std::mutex mMutex;
+    std::condition_variable mCondition;
 };
 
 // Simple AudioPlayback class.
@@ -86,15 +87,14 @@
     status_t create();
     sp<AudioTrack> getAudioTrackHandle();
     status_t start();
-    status_t waitForConsumption(bool testSeek = false);
+    status_t waitForConsumption(bool testSeek = false) EXCLUDES(mMutex);
     status_t fillBuffer();
     status_t onProcess(bool testSeek = false);
-    virtual void onBufferEnd() override;
-    void stop();
+    void onBufferEnd() override EXCLUDES(mMutex);
+    void stop() EXCLUDES(mMutex);
 
-    bool mStopPlaying;
-    std::mutex mMutex;
-    std::condition_variable mCondition;
+    bool mStopPlaying GUARDED_BY(mMutex);
+    mutable std::mutex mMutex;
 
     enum State {
         PLAY_NO_INIT,
@@ -144,10 +144,10 @@
                  AudioRecord::transfer_type transferType = AudioRecord::TRANSFER_CALLBACK,
                  const audio_attributes_t* attributes = nullptr);
     ~AudioCapture();
-    size_t onMoreData(const AudioRecord::Buffer& buffer) override;
+    size_t onMoreData(const AudioRecord::Buffer& buffer) override EXCLUDES(mMutex);
     void onOverrun() override;
-    void onMarker(uint32_t markerPosition) override;
-    void onNewPos(uint32_t newPos) override;
+    void onMarker(uint32_t markerPosition) override EXCLUDES(mMutex);
+    void onNewPos(uint32_t newPos) override EXCLUDES(mMutex);
     void onNewIAudioRecord() override;
     status_t create();
     status_t setRecordDuration(float durationInSec);
@@ -156,21 +156,20 @@
     sp<AudioRecord> getAudioRecordHandle();
     status_t start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE,
                    audio_session_t triggerSession = AUDIO_SESSION_NONE);
-    status_t obtainBufferCb(RawBuffer& buffer);
-    status_t obtainBuffer(RawBuffer& buffer);
-    status_t audioProcess();
-    status_t stop();
+    status_t obtainBufferCb(RawBuffer& buffer) EXCLUDES(mMutex);
+    status_t obtainBuffer(RawBuffer& buffer) EXCLUDES(mMutex);
+    status_t audioProcess() EXCLUDES(mMutex);
+    status_t stop() EXCLUDES(mMutex);
+    uint32_t getMarkerPeriod() const EXCLUDES(mMutex);
+    uint32_t getMarkerPosition() const EXCLUDES(mMutex);
+    void setMarkerPeriod(uint32_t markerPeriod) EXCLUDES(mMutex);
+    void setMarkerPosition(uint32_t markerPosition) EXCLUDES(mMutex);
+    uint32_t waitAndGetReceivedCbMarkerAtPosition() const EXCLUDES(mMutex);
+    uint32_t waitAndGetReceivedCbMarkerCount() const EXCLUDES(mMutex);
 
-    uint32_t mFrameCount;
-    uint32_t mNotificationFrames;
-    int64_t mNumFramesToRecord;
-    int64_t mNumFramesReceived;
-    int64_t mNumFramesLost;
-    uint32_t mMarkerPosition;
-    uint32_t mMarkerPeriod;
-    uint32_t mReceivedCbMarkerAtPosition;
-    uint32_t mReceivedCbMarkerCount;
-    bool mBufferOverrun;
+    uint32_t mFrameCount = 0;
+    uint32_t mNotificationFrames = 0;
+    int64_t mNumFramesToRecord = 0;
 
     enum State {
         REC_NO_INIT,
@@ -191,14 +190,23 @@
 
     size_t mMaxBytesPerCallback = 2048;
     sp<AudioRecord> mRecord;
-    State mState;
-    bool mStopRecording;
+    State mState = REC_NO_INIT;
+    bool mStopRecording GUARDED_BY(mMutex) = false;
     std::string mFileName;
     int mOutFileFd = -1;
 
-    std::mutex mMutex;
+    mutable std::mutex mMutex;
     std::condition_variable mCondition;
-    std::deque<RawBuffer> mBuffersReceived;
+    std::deque<RawBuffer> mBuffersReceived GUARDED_BY(mMutex);
+
+    mutable std::condition_variable mMarkerCondition;
+    uint32_t mMarkerPeriod GUARDED_BY(mMutex) = 0;
+    uint32_t mMarkerPosition GUARDED_BY(mMutex) = 0;
+    std::optional<uint32_t> mReceivedCbMarkerCount GUARDED_BY(mMutex);
+    std::optional<uint32_t> mReceivedCbMarkerAtPosition GUARDED_BY(mMutex);
+
+    int64_t mNumFramesReceived GUARDED_BY(mMutex) = 0;
+    int64_t mNumFramesLost GUARDED_BY(mMutex) = 0;
 };
 
 #endif  // AUDIO_TEST_UTILS_H_
diff --git a/media/libaudioclient/tests/audioeffect_analyser.cpp b/media/libaudioclient/tests/audioeffect_analyser.cpp
index f4d37bc..199fb8b 100644
--- a/media/libaudioclient/tests/audioeffect_analyser.cpp
+++ b/media/libaudioclient/tests/audioeffect_analyser.cpp
@@ -62,6 +62,15 @@
 constexpr int kNPointFFT = 16384;
 constexpr float kBinWidth = (float)kSamplingFrequency / kNPointFFT;
 
+// frequency used to generate testing tone
+constexpr uint32_t kTestFrequency = 1400;
+
+// Tolerance of audio gain difference in dB; 0.1 dB corresponds to an amplitude ratio of
+// 10^(0.1/20) (~1.0116), i.e. roughly a 1.16% difference in amplitude
+constexpr float kAudioGainDiffTolerancedB = .1f;
+
+const std::string kDataTempPath = "/data/local/tmp";
+
 const char* gPackageName = "AudioEffectAnalyser";
 
 static_assert(kPrimeDurationInSec + 2 * kNPointFFT / kSamplingFrequency < kCaptureDurationSec,
@@ -177,21 +186,30 @@
     return effect;
 }
 
-void computeFilterGainsAtTones(float captureDuration, int nPointFft, std::vector<int>& binOffsets,
-                               float* inputMag, float* gaindB, const char* res,
-                               audio_session_t sessionId) {
+void computeFilterGainsAtTones(float captureDuration, int nPointFft, std::vector<int> binOffsets,
+                               float* inputMag, float* gaindB, const std::string res,
+                               audio_session_t sessionId, const std::string res2 = "",
+                               audio_session_t sessionId2 = AUDIO_SESSION_NONE) {
     int totalFrameCount = captureDuration * kSamplingFrequency;
     auto output = pffft::AlignedVector<float>(totalFrameCount);
     auto fftOutput = pffft::AlignedVector<float>(nPointFft);
-    PlaybackEnv argsP;
-    argsP.mRes = std::string{res};
+    PlaybackEnv argsP, argsP2;
+    argsP.mRes = res;
     argsP.mSessionId = sessionId;
     CaptureEnv argsR;
     argsR.mCaptureDuration = captureDuration;
     std::thread playbackThread(&PlaybackEnv::play, &argsP);
+    std::optional<std::thread> playbackThread2;
+    if (res2 != "") {
+        argsP2 = {.mSessionId = sessionId2, .mRes = res2};
+        playbackThread2 = std::thread(&PlaybackEnv::play, &argsP2);
+    }
     std::thread captureThread(&CaptureEnv::capture, &argsR);
     captureThread.join();
     playbackThread.join();
+    if (playbackThread2 != std::nullopt) {
+        playbackThread2->join();
+    }
     ASSERT_EQ(OK, argsR.mStatus) << argsR.mMsg;
     ASSERT_EQ(OK, argsP.mStatus) << argsP.mMsg;
     ASSERT_FALSE(argsR.mDumpFileName.empty()) << "recorded not written to file";
@@ -210,7 +228,11 @@
         auto k = binOffsets[i];
         auto outputMag = sqrt((fftOutput[k * 2] * fftOutput[k * 2]) +
                               (fftOutput[k * 2 + 1] * fftOutput[k * 2 + 1]));
-        gaindB[i] = 20 * log10(outputMag / inputMag[i]);
+        if (inputMag == nullptr) {
+            gaindB[i] = 20 * log10(outputMag);
+        } else {
+            gaindB[i] = 20 * log10(outputMag / inputMag[i]);
+        }
     }
 }
 
@@ -282,7 +304,7 @@
         inputMag[i] = sqrt((fftInput[k * 2] * fftInput[k * 2]) +
                            (fftInput[k * 2 + 1] * fftInput[k * 2 + 1]));
     }
-    TemporaryFile tf("/data/local/tmp");
+    TemporaryFile tf(kDataTempPath);
     close(tf.release());
     std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
     fout.write((char*)input.data(), input.size() * sizeof(input[0]));
@@ -386,7 +408,7 @@
         inputMag[i] = sqrt((fftInput[k * 2] * fftInput[k * 2]) +
                            (fftInput[k * 2 + 1] * fftInput[k * 2 + 1]));
     }
-    TemporaryFile tf("/data/local/tmp");
+    TemporaryFile tf(kDataTempPath);
     close(tf.release());
     std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
     fout.write((char*)input.data(), input.size() * sizeof(input[0]));
@@ -396,7 +418,7 @@
     memset(gainWithOutFilter, 0, sizeof(gainWithOutFilter));
     ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(kCaptureDurationSec, kNPointFFT, binOffsets,
                                                       inputMag, gainWithOutFilter, tf.path,
-                                                      AUDIO_SESSION_OUTPUT_MIX));
+                                                      AUDIO_SESSION_NONE));
     float diffA = gainWithOutFilter[0] - gainWithOutFilter[1];
     float prevGain = -100.f;
     for (auto strength = 150; strength < 1000; strength += strengthSupported ? 150 : 1000) {
@@ -421,6 +443,56 @@
     }
 }
 
+// Assert that a silent audio session with an effect attached does not override the output audio.
+TEST(AudioEffectTest, SilentAudioEffectSessionNotOverrideOutput) {
+    audio_session_t sessionId =
+            (audio_session_t)AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    sp<AudioEffect> bassboost = createEffect(SL_IID_BASSBOOST, sessionId);
+    if ((bassboost->descriptor().flags & EFFECT_FLAG_HW_ACC_MASK) != 0) {
+        GTEST_SKIP() << "effect processed output inaccessible, skipping test";
+    }
+    ASSERT_EQ(OK, bassboost->initCheck());
+    ASSERT_EQ(NO_ERROR, bassboost->setEnabled(true));
+
+    const auto bin = roundToFreqCenteredToFftBin(kBinWidth, kTestFrequency);
+    const int binIndex = std::get<0 /* index */>(bin);
+    const int binFrequency = std::get<1 /* freq */>(bin);
+
+    const int totalFrameCount = kSamplingFrequency * kPlayBackDurationSec;
+    // input for effect module
+    auto silentAudio = pffft::AlignedVector<float>(totalFrameCount);
+    auto input = pffft::AlignedVector<float>(totalFrameCount);
+    generateMultiTone({binFrequency}, kSamplingFrequency, kPlayBackDurationSec, kDefAmplitude,
+                      input.data(), totalFrameCount);
+    TemporaryFile tf(kDataTempPath);
+    close(tf.release());
+    std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
+    fout.write((char*)input.data(), input.size() * sizeof(input[0]));
+    fout.close();
+
+    // play non-silent audio file on AUDIO_SESSION_NONE
+    float audioGain, audioPlusSilentEffectGain;
+    ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(kCaptureDurationSec, kNPointFFT, {binIndex},
+                                                      nullptr, &audioGain, tf.path,
+                                                      AUDIO_SESSION_NONE));
+    EXPECT_FALSE(std::isinf(audioGain)) << "output gain should not be -inf";
+
+    TemporaryFile silentFile(kDataTempPath);
+    close(silentFile.release());
+    std::ofstream fSilent(silentFile.path, std::ios::out | std::ios::binary);
+    fSilent.write((char*)silentAudio.data(), silentAudio.size() * sizeof(silentAudio[0]));
+    fSilent.close();
+    // play the non-silent audio file on AUDIO_SESSION_NONE and the silent audio on sessionId;
+    // expect the new output gain to be almost the same as in the previous playback
+    ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(
+            kCaptureDurationSec, kNPointFFT, {binIndex}, nullptr, &audioPlusSilentEffectGain,
+            tf.path, AUDIO_SESSION_NONE, silentFile.path, sessionId));
+    EXPECT_FALSE(std::isinf(audioPlusSilentEffectGain))
+            << "output might have been overwritten in effect accumulate mode";
+    EXPECT_NEAR(audioGain, audioPlusSilentEffectGain, kAudioGainDiffTolerancedB)
+            << " output gain should be almost the same with one more silent audio stream";
+}
+
 int main(int argc, char** argv) {
     android::ProcessState::self()->startThreadPool();
     ::testing::InitGoogleTest(&argc, argv);
diff --git a/media/libaudioclient/tests/audioeffect_tests.cpp b/media/libaudioclient/tests/audioeffect_tests.cpp
index 59d0c6a..bedeff9 100644
--- a/media/libaudioclient/tests/audioeffect_tests.cpp
+++ b/media/libaudioclient/tests/audioeffect_tests.cpp
@@ -70,8 +70,8 @@
     return effect;
 }
 
-status_t isEffectExistsOnAudioSession(const effect_uuid_t* type, const effect_uuid_t* uuid,
-                                      int priority, audio_session_t sessionId) {
+status_t createAndInitCheckEffect(const effect_uuid_t* type, const effect_uuid_t* uuid,
+                                  int priority, audio_session_t sessionId) {
     sp<AudioEffect> effect = createEffect(type, uuid, priority, sessionId);
     return effect->initCheck();
 }
@@ -272,10 +272,9 @@
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
-    EXPECT_EQ(NO_ERROR,
-              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                           kDefaultInputEffectPriority - 1,
-                                           capture->getAudioRecordHandle()->getSessionId()))
+    EXPECT_EQ(NO_ERROR, createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                                 kDefaultInputEffectPriority - 1,
+                                                 capture->getAudioRecordHandle()->getSessionId()))
             << "Effect should not have been added. " << type;
     EXPECT_EQ(OK, capture->audioProcess());
     EXPECT_EQ(OK, capture->stop());
@@ -296,9 +295,9 @@
                                         capture->getAudioRecordHandle()))
             << "Effect should have been default on record. " << type;
     EXPECT_EQ(ALREADY_EXISTS,
-              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                           kDefaultInputEffectPriority - 1,
-                                           capture->getAudioRecordHandle()->getSessionId()))
+              createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                       kDefaultInputEffectPriority - 1,
+                                       capture->getAudioRecordHandle()->getSessionId()))
             << "Effect should have been added. " << type;
     EXPECT_EQ(OK, capture->audioProcess());
     EXPECT_EQ(OK, capture->stop());
@@ -313,10 +312,9 @@
     EXPECT_FALSE(isEffectDefaultOnRecord(selectedEffectType, selectedEffectUuid,
                                          capture->getAudioRecordHandle()))
             << "Effect should not have been default on record. " << type;
-    EXPECT_EQ(NO_ERROR,
-              isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                           kDefaultInputEffectPriority - 1,
-                                           capture->getAudioRecordHandle()->getSessionId()))
+    EXPECT_EQ(NO_ERROR, createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                                 kDefaultInputEffectPriority - 1,
+                                                 capture->getAudioRecordHandle()->getSessionId()))
             << "Effect should not have been added. " << type;
     EXPECT_EQ(OK, capture->audioProcess());
     EXPECT_EQ(OK, capture->stop());
@@ -421,8 +419,8 @@
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
     EXPECT_EQ(compatCheck ? NO_ERROR : NO_INIT,
-              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
-                                           playback->getAudioTrackHandle()->getSessionId()))
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                       playback->getAudioTrackHandle()->getSessionId()))
             << "Effect should not have been added. " << mTypeStr;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     playback->stop();
@@ -445,8 +443,8 @@
     EXPECT_EQ(NO_ERROR, playback->start());
     // If effect chosen is not compatible with the session, then effect won't be applied
     EXPECT_EQ(compatCheck ? ALREADY_EXISTS : NO_INIT,
-              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
-                                           playback->getAudioTrackHandle()->getSessionId()))
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                       playback->getAudioTrackHandle()->getSessionId()))
             << "Effect should have been added. " << mTypeStr;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     if (mSelectFastMode) {
@@ -467,8 +465,8 @@
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
     EXPECT_EQ(compatCheck ? NO_ERROR : NO_INIT,
-              isEffectExistsOnAudioSession(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
-                                           playback->getAudioTrackHandle()->getSessionId()))
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1,
+                                       playback->getAudioTrackHandle()->getSessionId()))
             << "Effect should not have been added. " << mTypeStr;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     playback->stop();
@@ -502,8 +500,8 @@
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
 
-    EXPECT_EQ(ALREADY_EXISTS, isEffectExistsOnAudioSession(
-                                      &mType, &mUuid, kDefaultOutputEffectPriority - 1, sessionId))
+    EXPECT_EQ(ALREADY_EXISTS,
+              createAndInitCheckEffect(&mType, &mUuid, kDefaultOutputEffectPriority - 1, sessionId))
             << "Effect should have been added. " << mTypeStr;
     if (mSelectFastMode) {
         EXPECT_EQ(mIsFastCompatibleEffect ? AUDIO_OUTPUT_FLAG_FAST : 0,
@@ -556,8 +554,8 @@
     ASSERT_EQ(NO_ERROR, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"));
     EXPECT_EQ(NO_ERROR, playback->create());
     EXPECT_EQ(NO_ERROR, playback->start());
-    EXPECT_TRUE(isEffectExistsOnAudioSession(selectedEffectType, selectedEffectUuid,
-                                             kDefaultOutputEffectPriority - 1, sessionId))
+    ASSERT_EQ(ALREADY_EXISTS, createAndInitCheckEffect(selectedEffectType, selectedEffectUuid,
+                                                       kDefaultOutputEffectPriority - 1, sessionId))
             << "Effect should have been added. " << type;
     EXPECT_EQ(NO_ERROR, playback->waitForConsumption());
     playback->stop();
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index be6c581..f2fee8b 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -102,7 +102,10 @@
     }
 
     void TearDown() override {
-        if (mAC) ASSERT_EQ(OK, mAC->stop());
+        if (mAC) {
+            ASSERT_EQ(OK, mAC->stop());
+            mAC.clear();
+        }
     }
 };
 
@@ -120,10 +123,12 @@
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->addAudioDeviceCallback(cb));
     EXPECT_EQ(OK, mAC->start()) << "record creation failed";
     EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, cbOld->mAudioIo);
-    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, cbOld->mDeviceId);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, cb->mAudioIo);
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, cb->mDeviceId);
+    const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
+    const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
     EXPECT_EQ(BAD_VALUE, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(nullptr));
     EXPECT_EQ(INVALID_OPERATION, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cbOld));
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cb));
@@ -166,31 +171,33 @@
 }
 
 TEST_F(AudioRecordTest, TestGetSetMarker) {
-    mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
-    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition))
+    mAC->setMarkerPosition((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setMarkerPosition(mAC->getMarkerPosition()))
             << "setMarkerPosition() failed";
     uint32_t marker;
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker))
             << "getMarkerPosition() failed";
     EXPECT_EQ(OK, mAC->start()) << "start recording failed";
     EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
-    EXPECT_EQ(marker, mAC->mMarkerPosition)
+    EXPECT_EQ(marker, mAC->getMarkerPosition())
             << "configured marker and received marker are different";
-    EXPECT_EQ(mAC->mReceivedCbMarkerAtPosition, mAC->mMarkerPosition)
+    EXPECT_EQ(mAC->waitAndGetReceivedCbMarkerAtPosition(), mAC->getMarkerPosition())
             << "configured marker and received cb marker are different";
 }
 
 TEST_F(AudioRecordTest, TestGetSetMarkerPeriodical) {
-    mAC->mMarkerPeriod = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
-    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPeriod))
+    mAC->setMarkerPeriod((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->getMarkerPeriod()))
             << "setPositionUpdatePeriod() failed";
     uint32_t marker;
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker))
             << "getPositionUpdatePeriod() failed";
     EXPECT_EQ(OK, mAC->start()) << "start recording failed";
     EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
-    EXPECT_EQ(marker, mAC->mMarkerPeriod) << "configured marker and received marker are different";
-    EXPECT_EQ(mAC->mReceivedCbMarkerCount, mAC->mNumFramesToRecord / mAC->mMarkerPeriod)
+    EXPECT_EQ(marker, mAC->getMarkerPeriod())
+            << "configured marker and received marker are different";
+    EXPECT_EQ(mAC->waitAndGetReceivedCbMarkerCount(),
+              mAC->mNumFramesToRecord / mAC->getMarkerPeriod())
             << "configured marker and received cb marker are different";
 }
 
@@ -217,12 +224,12 @@
         EXPECT_EQ(mSessionId, mAC->getAudioRecordHandle()->getSessionId());
     if (mTransferType != AudioRecord::TRANSFER_CALLBACK) {
         uint32_t marker;
-        mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
+        mAC->setMarkerPosition((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
         EXPECT_EQ(INVALID_OPERATION,
-                  mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition));
+                  mAC->getAudioRecordHandle()->setMarkerPosition(mAC->getMarkerPosition()));
         EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker));
         EXPECT_EQ(INVALID_OPERATION,
-                  mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPosition));
+                  mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->getMarkerPosition()));
         EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker));
     }
     EXPECT_EQ(OK, mAC->start()) << "start recording failed";
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 3b2285e..a3ab9d2 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -64,16 +64,17 @@
         EXPECT_EQ(OK, ap->start()) << "audio track start failed";
         EXPECT_EQ(OK, ap->onProcess());
         EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-        EXPECT_TRUE(checkPatchPlayback(cb->mAudioIo, cb->mDeviceId));
+        const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+        EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
         EXPECT_NE(0, ap->getAudioTrackHandle()->getFlags() & output_flags[i]);
         audio_patch patch;
-        EXPECT_EQ(OK, getPatchForOutputMix(cb->mAudioIo, patch));
+        EXPECT_EQ(OK, getPatchForOutputMix(audioIo, patch));
         if (output_flags[i] != AUDIO_OUTPUT_FLAG_FAST) {
             // A "normal" output can still have a FastMixer, depending on the buffer size.
             // Thus, a fast track can be created on a mix port which does not have the FAST flag.
             for (auto j = 0; j < patch.num_sources; j++) {
                 if (patch.sources[j].type == AUDIO_PORT_TYPE_MIX &&
-                    patch.sources[j].ext.mix.handle == cb->mAudioIo) {
+                    patch.sources[j].ext.mix.handle == audioIo) {
                     SCOPED_TRACE(dumpPortConfig(patch.sources[j]));
                     EXPECT_NE(0, patch.sources[j].flags.output & output_flags[i])
                             << "expected output flag "
@@ -85,7 +86,18 @@
     }
 }
 
-TEST(AudioTrackTest, DefaultRoutingTest) {
+class AudioTrackTest : public ::testing::TestWithParam<int> {
+  public:
+    AudioTrackTest() : mSampleRate(GetParam()) {}
+
+    const uint32_t mSampleRate;
+};
+
+TEST_P(AudioTrackTest, DefaultRoutingTest) {
     audio_port_v7 port;
     if (OK != getPortByAttributes(AUDIO_PORT_ROLE_SOURCE, AUDIO_PORT_TYPE_DEVICE,
                                   AUDIO_DEVICE_IN_REMOTE_SUBMIX, "0", port)) {
@@ -94,7 +106,8 @@
 
     // create record instance
     sp<AudioCapture> capture = sp<AudioCapture>::make(
-            AUDIO_SOURCE_REMOTE_SUBMIX, 48000, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO);
+            AUDIO_SOURCE_REMOTE_SUBMIX, mSampleRate, AUDIO_FORMAT_PCM_16_BIT,
+            AUDIO_CHANNEL_IN_STEREO);
     ASSERT_NE(nullptr, capture);
     ASSERT_EQ(OK, capture->create()) << "record creation failed";
     sp<OnAudioDeviceUpdateNotifier> cbCapture = sp<OnAudioDeviceUpdateNotifier>::make();
@@ -102,7 +115,7 @@
 
     // create playback instance
     sp<AudioPlayback> playback = sp<AudioPlayback>::make(
-            48000 /* sampleRate */, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+            mSampleRate, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
             AUDIO_OUTPUT_FLAG_NONE, AUDIO_SESSION_NONE);
     ASSERT_NE(nullptr, playback);
     ASSERT_EQ(OK, playback->loadResource("/data/local/tmp/bbb_2ch_24kHz_s16le.raw"))
@@ -132,6 +145,12 @@
     playback->stop();
 }
 
+INSTANTIATE_TEST_SUITE_P(AudioTrackParameterizedTest, AudioTrackTest,
+                         ::testing::Values(44100, 48000));
+
 class AudioRoutingTest : public ::testing::Test {
   public:
     void SetUp() override {
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index 03c15f4..742ca48 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -108,30 +108,32 @@
 // UNIT TESTS
 TEST_F(AudioSystemTest, CheckServerSideValues) {
     ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    EXPECT_GT(mAF->sampleRate(mCbPlayback->mAudioIo), 0);
-    EXPECT_NE(mAF->format(mCbPlayback->mAudioIo), AUDIO_FORMAT_INVALID);
-    EXPECT_GT(mAF->frameCount(mCbPlayback->mAudioIo), 0);
+    const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevice();
+    EXPECT_GT(mAF->sampleRate(pbAudioIo), 0);
+    EXPECT_NE(mAF->format(pbAudioIo), AUDIO_FORMAT_INVALID);
+    EXPECT_GT(mAF->frameCount(pbAudioIo), 0);
     size_t frameCountHal, frameCountHalCache;
-    frameCountHal = mAF->frameCountHAL(mCbPlayback->mAudioIo);
+    frameCountHal = mAF->frameCountHAL(pbAudioIo);
     EXPECT_GT(frameCountHal, 0);
-    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(mCbPlayback->mAudioIo, &frameCountHalCache));
+    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(pbAudioIo, &frameCountHalCache));
     EXPECT_EQ(frameCountHal, frameCountHalCache);
-    EXPECT_GT(mAF->latency(mCbPlayback->mAudioIo), 0);
+    EXPECT_GT(mAF->latency(pbAudioIo), 0);
     // client side latency is at least server side latency
-    EXPECT_LE(mAF->latency(mCbPlayback->mAudioIo), mPlayback->getAudioTrackHandle()->latency());
+    EXPECT_LE(mAF->latency(pbAudioIo), mPlayback->getAudioTrackHandle()->latency());
 
     ASSERT_NO_FATAL_FAILURE(createRecordSession());
-    EXPECT_GT(mAF->sampleRate(mCbRecord->mAudioIo), 0);
-    // EXPECT_NE(mAF->format(mCbRecord->mAudioIo), AUDIO_FORMAT_INVALID);
-    EXPECT_GT(mAF->frameCount(mCbRecord->mAudioIo), 0);
-    EXPECT_GT(mAF->frameCountHAL(mCbRecord->mAudioIo), 0);
-    frameCountHal = mAF->frameCountHAL(mCbRecord->mAudioIo);
+    const auto [recAudioIo, __] = mCbRecord->getLastPortAndDevice();
+    EXPECT_GT(mAF->sampleRate(recAudioIo), 0);
+    // EXPECT_NE(mAF->format(recAudioIo), AUDIO_FORMAT_INVALID);
+    EXPECT_GT(mAF->frameCount(recAudioIo), 0);
+    EXPECT_GT(mAF->frameCountHAL(recAudioIo), 0);
+    frameCountHal = mAF->frameCountHAL(recAudioIo);
     EXPECT_GT(frameCountHal, 0);
-    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(mCbRecord->mAudioIo, &frameCountHalCache));
+    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(recAudioIo, &frameCountHalCache));
     EXPECT_EQ(frameCountHal, frameCountHalCache);
-    // EXPECT_GT(mAF->latency(mCbRecord->mAudioIo), 0);
+    // EXPECT_GT(mAF->latency(recAudioIo), 0);
     // client side latency is at least server side latency
-    // EXPECT_LE(mAF->latency(mCbRecord->mAudioIo), mCapture->getAudioRecordHandle()->latency());
+    // EXPECT_LE(mAF->latency(recAudioIo), mCapture->getAudioRecordHandle()->latency());
 
     EXPECT_GT(AudioSystem::getPrimaryOutputSamplingRate(), 0);  // first fast mixer sample rate
     EXPECT_GT(AudioSystem::getPrimaryOutputFrameCount(), 0);    // fast mixer frame count
@@ -197,19 +199,6 @@
     EXPECT_EQ(origBalance, tstBalance);
 }
 
-TEST_F(AudioSystemTest, GetStreamVolume) {
-    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    float origStreamVol;
-    EXPECT_EQ(NO_ERROR, AudioSystem::getStreamVolume(AUDIO_STREAM_MUSIC, &origStreamVol,
-                                                     mCbPlayback->mAudioIo));
-}
-
-TEST_F(AudioSystemTest, GetStreamMute) {
-    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    bool origMuteState;
-    EXPECT_EQ(NO_ERROR, AudioSystem::getStreamMute(AUDIO_STREAM_MUSIC, &origMuteState));
-}
-
 TEST_F(AudioSystemTest, StartAndStopAudioSource) {
     std::vector<struct audio_port_v7> ports;
     audio_port_config sourcePortConfig;
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index cb667a0..cf7d926 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -157,18 +157,20 @@
     EXPECT_EQ(OK, ap->start()) << "audio track start failed";
     EXPECT_EQ(OK, ap->onProcess());
     EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, cbOld->mAudioIo);
-    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, cbOld->mDeviceId);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, cb->mAudioIo);
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, cb->mDeviceId);
-    EXPECT_EQ(cb->mAudioIo, ap->getAudioTrackHandle()->getOutput());
-    EXPECT_EQ(cb->mDeviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
+    const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
+    const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
+    EXPECT_EQ(audioIo, ap->getAudioTrackHandle()->getOutput());
+    EXPECT_EQ(deviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
     String8 keys;
     keys = ap->getAudioTrackHandle()->getParameters(keys);
     if (!keys.empty()) {
         std::cerr << "track parameters :: " << keys << std::endl;
     }
-    EXPECT_TRUE(checkPatchPlayback(cb->mAudioIo, cb->mDeviceId));
+    EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
     EXPECT_EQ(BAD_VALUE, ap->getAudioTrackHandle()->removeAudioDeviceCallback(nullptr));
     EXPECT_EQ(INVALID_OPERATION, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cbOld));
     EXPECT_EQ(OK, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cb));
diff --git a/media/libaudioclient/tests/trackplayerbase_tests.cpp b/media/libaudioclient/tests/trackplayerbase_tests.cpp
index 7317bf0..a4dba9b 100644
--- a/media/libaudioclient/tests/trackplayerbase_tests.cpp
+++ b/media/libaudioclient/tests/trackplayerbase_tests.cpp
@@ -54,7 +54,7 @@
         mPlayer = new TrackPlayer();
         mPlayer->init(track.get(), mPlayer, PLAYER_TYPE_AAUDIO, AUDIO_USAGE_MEDIA,
                       AUDIO_SESSION_NONE);
-        sp<AudioTrack> playerTrack = mPlayer->mAudioTrack;
+        sp<AudioTrack> playerTrack = mPlayer->getAudioTrack();
         ASSERT_EQ(playerTrack->initCheck(), NO_ERROR);
 
         mBufferSize = mFrameCount * playerTrack->frameSize();
@@ -74,7 +74,7 @@
 
     void playBuffer() {
         bool blocking = true;
-        ssize_t nbytes = mPlayer->mAudioTrack->write(mBuffer.data(), mBufferSize, blocking);
+        ssize_t nbytes = mPlayer->getAudioTrack()->write(mBuffer.data(), mBufferSize, blocking);
         EXPECT_EQ(nbytes, mBufferSize) << "Did not write all data in blocking mode";
     }
 
diff --git a/media/libaudiohal/Android.bp b/media/libaudiohal/Android.bp
index 639c7aa..75e2c11 100644
--- a/media/libaudiohal/Android.bp
+++ b/media/libaudiohal/Android.bp
@@ -45,6 +45,8 @@
         "liberror_headers",
         "libmediautils_headers",
     ],
+
+    export_include_dirs: ["include"],
 }
 
 cc_library_shared {
diff --git a/media/libaudiohal/impl/Android.bp b/media/libaudiohal/impl/Android.bp
index dd8f021..1a6b949 100644
--- a/media/libaudiohal/impl/Android.bp
+++ b/media/libaudiohal/impl/Android.bp
@@ -227,11 +227,11 @@
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
         "latest_android_media_audio_common_types_ndk_shared",
+        "latest_av_audio_types_aidl_ndk_shared",
     ],
     shared_libs: [
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
-        "av-audio-types-aidl-V1-ndk",
         "libaudio_aidl_conversion_common_cpp",
         "libaudio_aidl_conversion_common_ndk",
         "libaudio_aidl_conversion_common_ndk_cpp",
diff --git a/media/libaudiohal/impl/ConversionHelperAidl.h b/media/libaudiohal/impl/ConversionHelperAidl.h
index 0fadd9c..fe00fb2 100644
--- a/media/libaudiohal/impl/ConversionHelperAidl.h
+++ b/media/libaudiohal/impl/ConversionHelperAidl.h
@@ -32,6 +32,28 @@
 
 namespace android {
 
+/*
+ * Helper macros that prepend the instance name and function name to log messages.
+ * Classes should provide a getInstanceName() API to use these macros.
+ *
+ * Usage:
+ *  AUGMENT_LOG(D);
+ *  AUGMENT_LOG(I, "hello!");
+ *  AUGMENT_LOG(W, "value: %d", value);
+ *
+ *  AUGMENT_LOG_IF(D, value < 0, "negative");
+ *  AUGMENT_LOG_IF(E, value < 0, "bad value: %d", value);
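+ *
+ *  Expansion (roughly): AUGMENT_LOG(W, "value: %d", value) becomes
+ *  ALOGW("[%s] %s: value: %d", getInstanceName().c_str(), __func__, value).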
+ */
+
+#define AUGMENT_LOG(level, ...)                                                  \
+    ALOG##level("[%s] %s" __VA_OPT__(": " __android_second(0, __VA_ARGS__, "")), \
+                getInstanceName().c_str(), __func__ __VA_OPT__(__android_rest(__VA_ARGS__)))
+
+#define AUGMENT_LOG_IF(level, cond, ...)                                                    \
+    ALOG##level##_IF(cond, "[%s] %s" __VA_OPT__(": " __android_second(0, __VA_ARGS__, "")), \
+                     getInstanceName().c_str(), __func__ __VA_OPT__(__android_rest(__VA_ARGS__)))
+
 class Args {
   public:
     explicit Args(const Vector<String16>& args)
@@ -49,13 +71,15 @@
 
 class ConversionHelperAidl {
   protected:
-    ConversionHelperAidl(std::string_view className) : mClassName(className) {}
+    ConversionHelperAidl(std::string_view className, std::string_view instanceName)
+        : mClassName(className), mInstanceName(instanceName) {}
 
-    const std::string& getClassName() const {
-        return mClassName;
-    }
+    const std::string& getClassName() const { return mClassName; }
+
+    const std::string& getInstanceName() const { return mInstanceName; }
 
     const std::string mClassName;
+    const std::string mInstanceName;
 };
 
 // 'action' must accept a value of type 'T' and return 'status_t'.
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 032533c..629cd7c 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -22,15 +22,20 @@
 #include <aidl/android/hardware/audio/core/BnStreamCallback.h>
 #include <aidl/android/hardware/audio/core/BnStreamOutEventCallback.h>
 #include <aidl/android/hardware/audio/core/StreamDescriptor.h>
+#include <android/binder_ibinder_platform.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
 #include <media/AidlConversionNdkCpp.h>
 #include <media/AidlConversionUtil.h>
 #include <mediautils/TimeCheck.h>
 #include <system/audio.h>
+#include <system/thread_defs.h>
+
 #include <Utils.h>
 #include <utils/Log.h>
 
+#include "AidlUtils.h"
 #include "DeviceHalAidl.h"
 #include "EffectHalAidl.h"
 #include "StreamHalAidl.h"
@@ -60,6 +65,8 @@
 using aidl::android::hardware::audio::common::isBitPositionFlagSet;
 using aidl::android::hardware::audio::common::kDumpFromAudioServerArgument;
 using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using aidl::android::hardware::audio::common::SourceMetadata;
 using aidl::android::hardware::audio::core::sounddose::ISoundDose;
 using aidl::android::hardware::audio::core::AudioPatch;
 using aidl::android::hardware::audio::core::AudioRoute;
@@ -71,6 +78,18 @@
 using aidl::android::hardware::audio::core::ModuleDebug;
 using aidl::android::hardware::audio::core::VendorParameter;
 
+#define RETURN_IF_MODULE_NOT_INIT(retVal)         \
+    if (mModule == nullptr) {                     \
+        AUGMENT_LOG(E, "module not initialized"); \
+        return retVal;                            \
+    }
+
+#define RETURN_IF_TELEPHONY_NOT_INIT(retVal)         \
+    if (mTelephony == nullptr) {                     \
+        AUGMENT_LOG(E, "telephony not initialized"); \
+        return retVal;                               \
+    }
+
 namespace android {
 
 namespace {
@@ -103,15 +122,16 @@
 
 DeviceHalAidl::DeviceHalAidl(const std::string& instance, const std::shared_ptr<IModule>& module,
                              const std::shared_ptr<IHalAdapterVendorExtension>& vext)
-        : ConversionHelperAidl("DeviceHalAidl"),
-          mInstance(instance), mModule(module), mVendorExt(vext),
-          mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
-          mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
-          mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
-          mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)),
-          mSoundDose(retrieveSubInterface<ISoundDose>(module, &IModule::getSoundDose)),
-          mMapper(instance, module), mMapperAccessor(mMapper, mLock) {
-}
+    : ConversionHelperAidl("DeviceHalAidl", instance),
+      mModule(module),
+      mVendorExt(vext),
+      mTelephony(retrieveSubInterface<ITelephony>(module, &IModule::getTelephony)),
+      mBluetooth(retrieveSubInterface<IBluetooth>(module, &IModule::getBluetooth)),
+      mBluetoothA2dp(retrieveSubInterface<IBluetoothA2dp>(module, &IModule::getBluetoothA2dp)),
+      mBluetoothLe(retrieveSubInterface<IBluetoothLe>(module, &IModule::getBluetoothLe)),
+      mSoundDose(retrieveSubInterface<ISoundDose>(module, &IModule::getSoundDose)),
+      mMapper(instance, module),
+      mMapperAccessor(mMapper, mLock) {}
 
 status_t DeviceHalAidl::getAudioPorts(std::vector<media::audio::common::AudioPort> *ports) {
     std::lock_guard l(mLock);
@@ -124,11 +144,13 @@
 }
 
 status_t DeviceHalAidl::getSupportedModes(std::vector<media::audio::common::AudioMode> *modes) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    if (mTelephony == nullptr) return INVALID_OPERATION;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+    RETURN_IF_TELEPHONY_NOT_INIT(INVALID_OPERATION);
+
     if (modes == nullptr) {
+        AUGMENT_LOG(E, "uninitialized modes");
         return BAD_VALUE;
     }
     std::vector<AudioMode> aidlModes;
@@ -146,48 +168,53 @@
 }
 
 status_t DeviceHalAidl::initCheck() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     std::lock_guard l(mLock);
     return mMapper.initialize();
 }
 
 status_t DeviceHalAidl::setVoiceVolume(float volume) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "volume %f", volume);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    if (mTelephony == nullptr) return INVALID_OPERATION;
-    ITelephony::TelecomConfig inConfig{ .voiceVolume = Float{volume} }, outConfig;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+    RETURN_IF_TELEPHONY_NOT_INIT(INVALID_OPERATION);
+
+    ITelephony::TelecomConfig inConfig{.voiceVolume = Float{volume}}, outConfig;
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(mTelephony->setTelecomConfig(inConfig, &outConfig)));
-    ALOGW_IF(outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
-            "%s: the resulting voice volume %f is not the same as requested %f",
-            __func__, outConfig.voiceVolume.value().value, volume);
+    AUGMENT_LOG_IF(
+            W, outConfig.voiceVolume.has_value() && volume != outConfig.voiceVolume.value().value,
+            "the resulting voice volume %f is not the same as requested %f",
+            outConfig.voiceVolume.value().value, volume);
     return OK;
 }
 
 status_t DeviceHalAidl::setMasterVolume(float volume) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "volume %f", volume);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     return statusTFromBinderStatus(mModule->setMasterVolume(volume));
 }
 
 status_t DeviceHalAidl::getMasterVolume(float *volume) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (volume == nullptr) {
+        AUGMENT_LOG(E, "uninitialized volumes");
         return BAD_VALUE;
     }
     return statusTFromBinderStatus(mModule->getMasterVolume(volume));
 }
 
 status_t DeviceHalAidl::setMode(audio_mode_t mode) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "mode %d", mode);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     AudioMode audioMode = VALUE_OR_FATAL(::aidl::android::legacy2aidl_audio_mode_t_AudioMode(mode));
     if (mTelephony != nullptr) {
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mTelephony->switchAudioMode(audioMode)));
@@ -196,90 +223,99 @@
 }
 
 status_t DeviceHalAidl::setMicMute(bool state) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "mute %d", state);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     return statusTFromBinderStatus(mModule->setMicMute(state));
 }
 
 status_t DeviceHalAidl::getMicMute(bool *state) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (state == nullptr) {
+        AUGMENT_LOG(E, "uninitialized mute state");
         return BAD_VALUE;
     }
     return statusTFromBinderStatus(mModule->getMicMute(state));
 }
 
 status_t DeviceHalAidl::setMasterMute(bool state) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "mute %d", state);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     return statusTFromBinderStatus(mModule->setMasterMute(state));
 }
 
 status_t DeviceHalAidl::getMasterMute(bool *state) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (state == nullptr) {
+        AUGMENT_LOG(E, "uninitialized mute state");
         return BAD_VALUE;
     }
     return statusTFromBinderStatus(mModule->getMasterMute(state));
 }
 
 status_t DeviceHalAidl::setParameters(const String8& kvPairs) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     AudioParameter parameters(kvPairs);
-    ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+    AUGMENT_LOG(D, "parameters: \"%s\"", parameters.toString().c_str());
 
     if (status_t status = filterAndUpdateBtA2dpParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating BT A2DP parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateBtA2dpParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateBtHfpParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating BT HFP parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateBtHfpParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateBtLeParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating BT LE parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateBtLeParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateBtScoParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating BT SCO parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateBtScoParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateScreenParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating screen parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateScreenParameters failed: %d", status);
     }
     if (status_t status = filterAndUpdateTelephonyParameters(parameters); status != OK) {
-        ALOGW("%s: filtering or updating telephony parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndUpdateTelephonyParameters failed: %d", status);
     }
     return parseAndSetVendorParameters(mVendorExt, mModule, parameters);
 }
 
 status_t DeviceHalAidl::getParameters(const String8& keys, String8 *values) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "keys: \"%s\"", keys.c_str());
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (values == nullptr) {
+        AUGMENT_LOG(E, "invalid values");
         return BAD_VALUE;
     }
     AudioParameter parameterKeys(keys), result;
     if (status_t status = filterAndRetrieveBtA2dpParameters(parameterKeys, &result); status != OK) {
-        ALOGW("%s: filtering or retrieving BT A2DP parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndRetrieveBtA2dpParameters failed: %d", status);
     }
     if (status_t status = filterAndRetrieveBtLeParameters(parameterKeys, &result); status != OK) {
-        ALOGW("%s: filtering or retrieving BT LE parameters failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filterAndRetrieveBtLeParameters failed: %d", status);
     }
     *values = result.toString();
     return parseAndGetVendorParameters(mVendorExt, mModule, parameterKeys, values);
 }
 
 status_t DeviceHalAidl::getInputBufferSize(struct audio_config* config, size_t* size) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (config == nullptr || size == nullptr) {
+        AUGMENT_LOG(E, "invalid config or size");
         return BAD_VALUE;
     }
     constexpr bool isInput = true;
@@ -387,7 +423,7 @@
         return runCb([](CbRef cb) { cb->onWriteReady(); });
     }
     ndk::ScopedAStatus onError() override {
-        return runCb([](CbRef cb) { cb->onError(); });
+        return runCb([](CbRef cb) { cb->onError(true /*isHardError*/); });
     }
     ndk::ScopedAStatus onDrainReady() override {
         return runCb([](CbRef cb) { cb->onDrainReady(); });
@@ -427,11 +463,14 @@
         audio_io_handle_t handle, audio_devices_t devices,
         audio_output_flags_t flags, struct audio_config* config,
         const char* address,
-        sp<StreamOutHalInterface>* outStream) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+        sp<StreamOutHalInterface>* outStream,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
+    AUGMENT_LOG(D, "handle: %d devices %0x flags %0x", handle, devices, flags);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (outStream == nullptr || config == nullptr) {
+        AUGMENT_LOG(E, "invalid outStream or config");
         return BAD_VALUE;
     }
     constexpr bool isInput = false;
@@ -443,9 +482,12 @@
             ::aidl::android::legacy2aidl_audio_device_AudioDevice(devices, address));
     int32_t aidlOutputFlags = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    SourceMetadata aidlMetadata = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_playback_track_metadata_v7_SourceMetadata(sourceMetadata));
     AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
     AudioPortConfig mixPortConfig;
     AudioPatch aidlPatch;
+
     Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
     {
         std::lock_guard l(mLock);
@@ -460,23 +502,35 @@
     args.portConfigId = mixPortConfig.id;
     const bool isOffload = isBitPositionFlagSet(
             aidlOutputFlags, AudioOutputFlags::COMPRESS_OFFLOAD);
+    const bool isHwAvSync = isBitPositionFlagSet(
+            aidlOutputFlags, AudioOutputFlags::HW_AV_SYNC);
     std::shared_ptr<OutputStreamCallbackAidl> streamCb;
     if (isOffload) {
         streamCb = ndk::SharedRefBase::make<OutputStreamCallbackAidl>(this);
+        ndk::SpAIBinder binder = streamCb->asBinder();
+        AIBinder_setMinSchedulerPolicy(binder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+        AIBinder_setInheritRt(binder.get(), true);
     }
     auto eventCb = ndk::SharedRefBase::make<OutputStreamEventCallbackAidl>(this);
-    if (isOffload) {
+    ndk::SpAIBinder binder = eventCb->asBinder();
+    AIBinder_setMinSchedulerPolicy(binder.get(), SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
+    AIBinder_setInheritRt(binder.get(), true);
+
+    if (isOffload || isHwAvSync) {
         args.offloadInfo = aidlConfig.offloadInfo;
+    }
+    if (isOffload) {
         args.callback = streamCb;
     }
     args.bufferSizeFrames = aidlConfig.frameCount;
     args.eventCallback = eventCb;
+    args.sourceMetadata = aidlMetadata;
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn ret;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
-    StreamContextAidl context(ret.desc, isOffload);
+    StreamContextAidl context(ret.desc, isOffload, aidlHandle);
     if (!context.isValid()) {
-        ALOGE("%s: Failed to created a valid stream context from the descriptor: %s",
-                __func__, ret.desc.toString().c_str());
+        AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
+                    ret.desc.toString().c_str());
         return NO_INIT;
     }
     auto stream = sp<StreamOutHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
@@ -484,8 +538,11 @@
     *outStream = stream;
     /* StreamOutHalInterface* */ void* cbCookie = (*outStream).get();
     {
-        std::lock_guard l(mLock);
+        std::lock_guard l(mCallbacksLock);
         mCallbacks.emplace(cbCookie, Callbacks{});
+    }
+    {
+        std::lock_guard l(mLock);
         mMapper.addStream(*outStream, mixPortConfig.id, aidlPatch.id);
     }
     if (streamCb) {
@@ -505,10 +562,11 @@
         const char* address, audio_source_t source,
         audio_devices_t outputDevice, const char* outputDeviceAddress,
         sp<StreamInHalInterface>* inStream) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "handle: %d devices %0x flags %0x", handle, devices, flags);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (inStream == nullptr || config == nullptr) {
+        AUGMENT_LOG(E, "invalid inStream or config");
         return BAD_VALUE;
     }
     constexpr bool isInput = true;
@@ -548,10 +606,10 @@
     args.bufferSizeFrames = aidlConfig.frameCount;
     ::aidl::android::hardware::audio::core::IModule::OpenInputStreamReturn ret;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openInputStream(args, &ret)));
-    StreamContextAidl context(ret.desc, false /*isAsynchronous*/);
+    StreamContextAidl context(ret.desc, false /*isAsynchronous*/, aidlHandle);
     if (!context.isValid()) {
-        ALOGE("%s: Failed to created a valid stream context from the descriptor: %s",
-                __func__, ret.desc.toString().c_str());
+        AUGMENT_LOG(E, "Failed to created a valid stream context from the descriptor: %s",
+                    ret.desc.toString().c_str());
         return NO_INIT;
     }
     *inStream = sp<StreamInHalAidl>::make(*config, std::move(context), aidlPatch.latenciesMs[0],
@@ -565,7 +623,10 @@
 }
 
 status_t DeviceHalAidl::supportsAudioPatches(bool* supportsPatches) {
+    AUGMENT_LOG(V);
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (supportsPatches == nullptr) {
+        AUGMENT_LOG(E, "uninitialized supportsPatches");
         return BAD_VALUE;
     }
     *supportsPatches = true;
@@ -577,13 +638,20 @@
                                          unsigned int num_sinks,
                                          const struct audio_port_config* sinks,
                                          audio_patch_handle_t* patch) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "sources: %d sinks %d", num_sources, num_sinks);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    if (num_sinks > AUDIO_PATCH_PORTS_MAX || num_sources > AUDIO_PATCH_PORTS_MAX ||
-        sources == nullptr || sinks == nullptr || patch == nullptr) {
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+    if (num_sinks > AUDIO_PATCH_PORTS_MAX || num_sources > AUDIO_PATCH_PORTS_MAX) {
+        AUGMENT_LOG(E, "invalid sources %d or sinks %d ", num_sources, num_sinks);
         return BAD_VALUE;
     }
+
+    if (sources == nullptr || sinks == nullptr || patch == nullptr) {
+        AUGMENT_LOG(E, "uninitialized sources %d or sinks %d or patches %d", (sources == nullptr),
+                    (sinks == nullptr), (patch == nullptr));
+        return BAD_VALUE;
+    }
+
     // When the patch handle (*patch) is AUDIO_PATCH_HANDLE_NONE, it means
     // the framework wants to create a new patch. The handle has to be generated
     // by the HAL. Since handles generated this way can only be unique within
@@ -645,9 +713,10 @@
 }
 
 status_t DeviceHalAidl::releaseAudioPatch(audio_patch_handle_t patch) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D, "patch: %d", patch);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     static_assert(AUDIO_PATCH_HANDLE_NONE == 0);
     if (patch == AUDIO_PATCH_HANDLE_NONE) {
         return BAD_VALUE;
@@ -670,7 +739,10 @@
 }
 
 status_t DeviceHalAidl::getAudioPort(struct audio_port* port) {
+    AUGMENT_LOG(V);
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
+        AUGMENT_LOG(E, "port not initialized");
         return BAD_VALUE;
     }
     audio_port_v7 portV7;
@@ -680,10 +752,12 @@
 }
 
 status_t DeviceHalAidl::getAudioPort(struct audio_port_v7 *port) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
+        AUGMENT_LOG(E, "port not initialized");
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
@@ -691,8 +765,7 @@
     auto aidlPort = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
     if (aidlPort.ext.getTag() != AudioPortExt::device) {
-        ALOGE("%s: provided port is not a device port (module %s): %s",
-                __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        AUGMENT_LOG(E, "provided port is not a device port %s", aidlPort.toString().c_str());
         return BAD_VALUE;
     }
     const auto& matchDevice = aidlPort.ext.get<AudioPortExt::device>().device;
@@ -711,11 +784,13 @@
 
 status_t DeviceHalAidl::getAudioMixPort(const struct audio_port_v7 *devicePort,
                                         struct audio_port_v7 *mixPort) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    if (devicePort == nullptr || mixPort == nullptr ||
-            devicePort->type != AUDIO_PORT_TYPE_DEVICE || mixPort->type != AUDIO_PORT_TYPE_MIX) {
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+
+    if (devicePort == nullptr || mixPort == nullptr || devicePort->type != AUDIO_PORT_TYPE_DEVICE ||
+        mixPort->type != AUDIO_PORT_TYPE_MIX) {
+        AUGMENT_LOG(E, "invalid device or mix port");
         return BAD_VALUE;
     }
     const int32_t aidlHandle = VALUE_OR_RETURN_STATUS(
@@ -733,10 +808,12 @@
 }
 
 status_t DeviceHalAidl::setAudioPortConfig(const struct audio_port_config* config) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (config == nullptr) {
+        AUGMENT_LOG(E, "config not initialized");
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -750,9 +827,10 @@
 }
 
 MicrophoneInfoProvider::Info const* DeviceHalAidl::getMicrophoneInfo() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (!mModule) return {};
+    RETURN_IF_MODULE_NOT_INIT({});
     std::lock_guard l(mLock);
     if (mMicrophones.status == Microphones::Status::UNKNOWN) {
         TIME_CHECK();
@@ -764,7 +842,7 @@
         } else if (status == INVALID_OPERATION) {
             mMicrophones.status = Microphones::Status::NOT_SUPPORTED;
         } else {
-            ALOGE("%s: Unexpected status from 'IModule.getMicrophones': %d", __func__, status);
+            AUGMENT_LOG(E, "Unexpected status from HAL: %d", status);
             return {};
         }
     }
@@ -776,10 +854,12 @@
 
 status_t DeviceHalAidl::getMicrophones(
         std::vector<audio_microphone_characteristic_t>* microphones) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (microphones == nullptr) {
+        AUGMENT_LOG(E, "microphones not initialized");
         return BAD_VALUE;
     }
     auto staticInfo = getMicrophoneInfo();
@@ -798,10 +878,12 @@
 
 status_t DeviceHalAidl::addDeviceEffect(
         const struct audio_port_config *device, sp<EffectHalInterface> effect) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (device == nullptr || effect == nullptr) {
+        AUGMENT_LOG(E, "device or effect not initialized");
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -810,8 +892,8 @@
             ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
                     *device, isInput, 0));
     if (requestedPortConfig.ext.getTag() != AudioPortExt::Tag::device) {
-        ALOGE("%s: provided port config is not a device port config: %s",
-                __func__, requestedPortConfig.toString().c_str());
+        AUGMENT_LOG(E, "provided port config is not a device port config: %s",
+                    requestedPortConfig.toString().c_str());
         return BAD_VALUE;
     }
     AudioPortConfig devicePortConfig;
@@ -829,10 +911,11 @@
 }
 status_t DeviceHalAidl::removeDeviceEffect(
         const struct audio_port_config *device, sp<EffectHalInterface> effect) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (device == nullptr || effect == nullptr) {
+        AUGMENT_LOG(E, "device or effect not initialized");
         return BAD_VALUE;
     }
     bool isInput = VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(
@@ -841,8 +924,8 @@
             ::aidl::android::legacy2aidl_audio_port_config_AudioPortConfig(
                     *device, isInput, 0));
     if (requestedPortConfig.ext.getTag() != AudioPortExt::Tag::device) {
-        ALOGE("%s: provided port config is not a device port config: %s",
-                __func__, requestedPortConfig.toString().c_str());
+        AUGMENT_LOG(E, "provided port config is not a device port config: %s",
+                    requestedPortConfig.toString().c_str());
         return BAD_VALUE;
     }
     AudioPortConfig devicePortConfig;
@@ -860,11 +943,13 @@
 status_t DeviceHalAidl::getMmapPolicyInfos(
         media::audio::common::AudioMMapPolicyType policyType,
         std::vector<media::audio::common::AudioMMapPolicyInfo>* policyInfos) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
-    AudioMMapPolicyType mmapPolicyType = VALUE_OR_RETURN_STATUS(
-            cpp2ndk_AudioMMapPolicyType(policyType));
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+
+    AudioMMapPolicyType mmapPolicyType =
+            VALUE_OR_RETURN_STATUS(cpp2ndk_AudioMMapPolicyType(policyType));
 
     std::vector<AudioMMapPolicyInfo> mmapPolicyInfos;
 
@@ -880,9 +965,10 @@
 }
 
 int32_t DeviceHalAidl::getAAudioMixerBurstCount() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     int32_t mixerBurstCount = 0;
     if (mModule->getAAudioMixerBurstCount(&mixerBurstCount).isOk()) {
         return mixerBurstCount;
@@ -891,9 +977,10 @@
 }
 
 int32_t DeviceHalAidl::getAAudioHardwareBurstMinUsec() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     int32_t hardwareBurstMinUsec = 0;
     if (mModule->getAAudioHardwareBurstMinUsec(&hardwareBurstMinUsec).isOk()) {
         return hardwareBurstMinUsec;
@@ -902,9 +989,10 @@
 }
 
 error::Result<audio_hw_sync_t> DeviceHalAidl::getHwAvSync() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     int32_t aidlHwAvSync;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->generateHwAvSyncId(&aidlHwAvSync)));
     return VALUE_OR_RETURN_STATUS(
@@ -920,55 +1008,59 @@
 }
 
 status_t DeviceHalAidl::supportsBluetoothVariableLatency(bool* supports) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (supports == nullptr) {
         return BAD_VALUE;
     }
     return statusTFromBinderStatus(mModule->supportsVariableLatency(supports));
 }
 
-status_t DeviceHalAidl::getSoundDoseInterface(const std::string& module,
-                                              ::ndk::SpAIBinder* soundDoseBinder)  {
+status_t DeviceHalAidl::getSoundDoseInterface([[maybe_unused]] const std::string& module,
+                                              ::ndk::SpAIBinder* soundDoseBinder) {
+    AUGMENT_LOG(V);
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
+
     if (soundDoseBinder == nullptr) {
         return BAD_VALUE;
     }
     if (mSoundDose == nullptr) {
-        ALOGE("%s failed to retrieve the sound dose interface for module %s",
-                __func__, module.c_str());
+        AUGMENT_LOG(E, "failed to retrieve the sound dose interface");
         return BAD_VALUE;
     }
 
     if (mSoundDose == nullptr) {
-        ALOGE("%s failed to return the sound dose interface for module %s: not implemented",
-                  __func__,
-                  module.c_str());
+        AUGMENT_LOG(E, "failed to return the sound dose interface not implemented");
         return NO_INIT;
     }
 
     *soundDoseBinder = mSoundDose->asBinder();
-    ALOGI("%s using audio AIDL HAL sound dose interface", __func__);
+    AUGMENT_LOG(I, "using audio AIDL HAL sound dose interface");
     return OK;
 }
 
 status_t DeviceHalAidl::prepareToDisconnectExternalDevice(const struct audio_port_v7* port) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
+        AUGMENT_LOG(E, "port not initialized");
         return BAD_VALUE;
     }
-    const bool isInput = VALUE_OR_RETURN_STATUS(
-            ::aidl::android::portDirection(port->role, port->type)) ==
-                    ::aidl::android::AudioPortDirection::INPUT;
+    const bool isInput =
+            VALUE_OR_RETURN_STATUS(::aidl::android::portDirection(port->role, port->type)) ==
+            ::aidl::android::AudioPortDirection::INPUT;
     AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
     if (aidlPort.ext.getTag() != AudioPortExt::device) {
-        ALOGE("%s: provided port is not a device port (module %s): %s",
-              __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        AUGMENT_LOG(E, "provided port is not a device port: %s", aidlPort.toString().c_str());
         return BAD_VALUE;
     }
+
+    AUGMENT_LOG(D, "device %s", aidlPort.toString().c_str());
+
     status_t status = NO_ERROR;
     {
         std::lock_guard l(mLock);
@@ -989,10 +1081,11 @@
 }
 
 status_t DeviceHalAidl::setConnectedState(const struct audio_port_v7 *port, bool connected) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     if (port == nullptr) {
+        AUGMENT_LOG(E, "port not initialized");
         return BAD_VALUE;
     }
     if (!connected) {
@@ -1011,17 +1104,18 @@
     AudioPort aidlPort = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_port_v7_AudioPort(*port, isInput));
     if (aidlPort.ext.getTag() != AudioPortExt::device) {
-        ALOGE("%s: provided port is not a device port (module %s): %s",
-                __func__, mInstance.c_str(), aidlPort.toString().c_str());
+        AUGMENT_LOG(E, "provided port is not a device port: %s", aidlPort.toString().c_str());
         return BAD_VALUE;
     }
+    AUGMENT_LOG(D, "connected %d port: %s", connected, aidlPort.toString().c_str());
     std::lock_guard l(mLock);
     return mMapper.setDevicePortConnectedState(aidlPort, connected);
 }
 
 status_t DeviceHalAidl::setSimulateDeviceConnections(bool enabled) {
+    AUGMENT_LOG(V);
     TIME_CHECK();
-    if (mModule == nullptr) return NO_INIT;
+    RETURN_IF_MODULE_NOT_INIT(NO_INIT);
     {
         std::lock_guard l(mLock);
         mMapper.resetUnusedPatchesAndPortConfigs();
@@ -1030,9 +1124,9 @@
     status_t status = statusTFromBinderStatus(mModule->setModuleDebug(debug));
     // This is important to log as it affects HAL behavior.
     if (status == OK) {
-        ALOGI("%s: set enabled: %d", __func__, enabled);
+        AUGMENT_LOG(I, "set enabled: %d", enabled);
     } else {
-        ALOGW("%s: set enabled to %d failed: %d", __func__, enabled, status);
+        AUGMENT_LOG(W, "set enabled to %d failed: %d", enabled, status);
     }
     return status;
 }
@@ -1047,7 +1141,7 @@
                             mBluetoothA2dp->supportsOffloadReconfiguration(&supports)));
             result->addInt(key, supports ? 1 : 0);
         } else {
-            ALOGI("%s: no IBluetoothA2dp on %s", __func__, mInstance.c_str());
+            AUGMENT_LOG(I, "no IBluetoothA2dp");
             result->addInt(key, 0);
         }
     }
@@ -1064,7 +1158,7 @@
                             mBluetoothLe->supportsOffloadReconfiguration(&supports)));
             result->addInt(key, supports ? 1 : 0);
         } else {
-            ALOGI("%s: no mBluetoothLe on %s", __func__, mInstance.c_str());
+            AUGMENT_LOG(I, "no mBluetoothLe");
             result->addInt(key, 0);
         }
     }
@@ -1075,29 +1169,29 @@
     std::optional<bool> a2dpEnabled;
     std::optional<std::vector<VendorParameter>> reconfigureOffload;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtA2dpSuspended),
-                    [&a2dpEnabled](const String8& trueOrFalse) {
-                        if (trueOrFalse == AudioParameter::valueTrue) {
-                            a2dpEnabled = false;  // 'suspended' == true
-                            return OK;
-                        } else if (trueOrFalse == AudioParameter::valueFalse) {
-                            a2dpEnabled = true;  // 'suspended' == false
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtA2dpSuspended, trueOrFalse.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtA2dpSuspended),
+            [&a2dpEnabled, this](const String8& trueOrFalse) {
+                if (trueOrFalse == AudioParameter::valueTrue) {
+                    a2dpEnabled = false;  // 'suspended' == true
+                    return OK;
+                } else if (trueOrFalse == AudioParameter::valueFalse) {
+                    a2dpEnabled = true;  // 'suspended' == false
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtA2dpSuspended, trueOrFalse.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyReconfigA2dp),
-                    [&](const String8& value) -> status_t {
-                        std::vector<VendorParameter> result;
-                        RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                                mVendorExt->parseBluetoothA2dpReconfigureOffload(
-                                        std::string(value.c_str()), &result)));
-                        reconfigureOffload = std::move(result);
-                        return OK;
-                    }));
+            parameters, String8(AudioParameter::keyReconfigA2dp),
+            [&](const String8& value) -> status_t {
+                std::vector<VendorParameter> result;
+                RETURN_STATUS_IF_ERROR(
+                        statusTFromBinderStatus(mVendorExt->parseBluetoothA2dpReconfigureOffload(
+                                std::string(value.c_str()), &result)));
+                reconfigureOffload = std::move(result);
+                return OK;
+            }));
     if (mBluetoothA2dp != nullptr && a2dpEnabled.has_value()) {
         return statusTFromBinderStatus(mBluetoothA2dp->setEnabled(a2dpEnabled.value()));
     }
@@ -1111,34 +1205,33 @@
 status_t DeviceHalAidl::filterAndUpdateBtHfpParameters(AudioParameter &parameters) {
     IBluetooth::HfpConfig hfpConfig;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtHfpEnable),
-                    [&hfpConfig](const String8& trueOrFalse) {
-                        if (trueOrFalse == AudioParameter::valueTrue) {
-                            hfpConfig.isEnabled = Boolean{ .value = true };
-                            return OK;
-                        } else if (trueOrFalse == AudioParameter::valueFalse) {
-                            hfpConfig.isEnabled = Boolean{ .value = false };
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtHfpEnable, trueOrFalse.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtHfpEnable),
+            [&hfpConfig, this](const String8& trueOrFalse) {
+                if (trueOrFalse == AudioParameter::valueTrue) {
+                    hfpConfig.isEnabled = Boolean{.value = true};
+                    return OK;
+                } else if (trueOrFalse == AudioParameter::valueFalse) {
+                    hfpConfig.isEnabled = Boolean{.value = false};
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtHfpEnable, trueOrFalse.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
-                    parameters, String8(AudioParameter::keyBtHfpSamplingRate),
-                    [&hfpConfig](int sampleRate) {
-                        return sampleRate > 0 ?
-                                hfpConfig.sampleRate = Int{ .value = sampleRate }, OK : BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtHfpSamplingRate),
+            [&hfpConfig](int sampleRate) {
+                return sampleRate > 0 ? hfpConfig.sampleRate = Int{.value = sampleRate},
+                                        OK : BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
-                    parameters, String8(AudioParameter::keyBtHfpVolume),
-                    [&hfpConfig](int volume0to15) {
-                        if (volume0to15 >= 0 && volume0to15 <= 15) {
-                            hfpConfig.volume = Float{ .value = volume0to15 / 15.0f };
-                            return OK;
-                        }
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtHfpVolume), [&hfpConfig](int volume0to15) {
+                if (volume0to15 >= 0 && volume0to15 <= 15) {
+                    hfpConfig.volume = Float{.value = volume0to15 / 15.0f};
+                    return OK;
+                }
+                return BAD_VALUE;
+            }));
     if (mBluetooth != nullptr && hfpConfig != IBluetooth::HfpConfig{}) {
         IBluetooth::HfpConfig newHfpConfig;
         return statusTFromBinderStatus(mBluetooth->setHfpConfig(hfpConfig, &newHfpConfig));
@@ -1150,39 +1243,39 @@
     std::optional<bool> leEnabled;
     std::optional<std::vector<VendorParameter>> reconfigureOffload;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtLeSuspended),
-                    [&leEnabled](const String8& trueOrFalse) {
-                        if (trueOrFalse == AudioParameter::valueTrue) {
-                            leEnabled = false;  // 'suspended' == true
-                            return OK;
-                        } else if (trueOrFalse == AudioParameter::valueFalse) {
-                            leEnabled = true;  // 'suspended' == false
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtLeSuspended, trueOrFalse.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtLeSuspended),
+            [&leEnabled, this](const String8& trueOrFalse) {
+                if (trueOrFalse == AudioParameter::valueTrue) {
+                    leEnabled = false;  // 'suspended' == true
+                    return OK;
+                } else if (trueOrFalse == AudioParameter::valueFalse) {
+                    leEnabled = true;  // 'suspended' == false
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtLeSuspended, trueOrFalse.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyReconfigLe),
-                    [&](const String8& value) -> status_t {
-                        if (mVendorExt != nullptr) {
-                            std::vector<VendorParameter> result;
-                            RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
-                                    mVendorExt->parseBluetoothLeReconfigureOffload(
-                                            std::string(value.c_str()), &result)));
-                            reconfigureOffload = std::move(result);
-                        } else {
-                            reconfigureOffload = std::vector<VendorParameter>();
-                        }
-                        return OK;
-                    }));
+            parameters, String8(AudioParameter::keyReconfigLe),
+            [&](const String8& value) -> status_t {
+                if (mVendorExt != nullptr) {
+                    std::vector<VendorParameter> result;
+                    RETURN_STATUS_IF_ERROR(
+                            statusTFromBinderStatus(mVendorExt->parseBluetoothLeReconfigureOffload(
+                                    std::string(value.c_str()), &result)));
+                    reconfigureOffload = std::move(result);
+                } else {
+                    reconfigureOffload = std::vector<VendorParameter>();
+                }
+                return OK;
+            }));
     if (mBluetoothLe != nullptr && leEnabled.has_value()) {
         return statusTFromBinderStatus(mBluetoothLe->setEnabled(leEnabled.value()));
     }
     if (mBluetoothLe != nullptr && reconfigureOffload.has_value()) {
-        return statusTFromBinderStatus(mBluetoothLe->reconfigureOffload(
-                        reconfigureOffload.value()));
+        return statusTFromBinderStatus(
+                mBluetoothLe->reconfigureOffload(reconfigureOffload.value()));
     }
     return OK;
 }
@@ -1190,53 +1283,53 @@
 status_t DeviceHalAidl::filterAndUpdateBtScoParameters(AudioParameter &parameters) {
     IBluetooth::ScoConfig scoConfig;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtSco),
-                    [&scoConfig](const String8& onOrOff) {
-                        if (onOrOff == AudioParameter::valueOn) {
-                            scoConfig.isEnabled = Boolean{ .value = true };
-                            return OK;
-                        } else if (onOrOff == AudioParameter::valueOff) {
-                            scoConfig.isEnabled = Boolean{ .value = false };
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtSco, onOrOff.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtSco),
+            [&scoConfig, this](const String8& onOrOff) {
+                if (onOrOff == AudioParameter::valueOn) {
+                    scoConfig.isEnabled = Boolean{.value = true};
+                    return OK;
+                } else if (onOrOff == AudioParameter::valueOff) {
+                    scoConfig.isEnabled = Boolean{.value = false};
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtSco, onOrOff.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtScoHeadsetName),
-                    [&scoConfig](const String8& name) {
-                        scoConfig.debugName = name;
-                        return OK;
-                    }));
+            parameters, String8(AudioParameter::keyBtScoHeadsetName),
+            [&scoConfig](const String8& name) {
+                scoConfig.debugName = name;
+                return OK;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtNrec),
-                    [&scoConfig](const String8& onOrOff) {
-                        if (onOrOff == AudioParameter::valueOn) {
-                            scoConfig.isNrecEnabled = Boolean{ .value = true };
-                            return OK;
-                        } else if (onOrOff == AudioParameter::valueOff) {
-                            scoConfig.isNrecEnabled = Boolean{ .value = false };
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtNrec, onOrOff.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtNrec),
+            [&scoConfig, this](const String8& onOrOff) {
+                if (onOrOff == AudioParameter::valueOn) {
+                    scoConfig.isNrecEnabled = Boolean{.value = true};
+                    return OK;
+                } else if (onOrOff == AudioParameter::valueOff) {
+                    scoConfig.isNrecEnabled = Boolean{.value = false};
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtNrec, onOrOff.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyBtScoWb),
-                    [&scoConfig](const String8& onOrOff) {
-                        if (onOrOff == AudioParameter::valueOn) {
-                            scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO_WB;
-                            return OK;
-                        } else if (onOrOff == AudioParameter::valueOff) {
-                            scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO;
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyBtScoWb, onOrOff.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyBtScoWb),
+            [&scoConfig, this](const String8& onOrOff) {
+                if (onOrOff == AudioParameter::valueOn) {
+                    scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO_WB;
+                    return OK;
+                } else if (onOrOff == AudioParameter::valueOff) {
+                    scoConfig.mode = IBluetooth::ScoConfig::Mode::SCO;
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyBtScoWb, onOrOff.c_str());
+                return BAD_VALUE;
+            }));
     if (mBluetooth != nullptr && scoConfig != IBluetooth::ScoConfig{}) {
         IBluetooth::ScoConfig newScoConfig;
         return statusTFromBinderStatus(mBluetooth->setScoConfig(scoConfig, &newScoConfig));
@@ -1246,34 +1339,41 @@
 
 status_t DeviceHalAidl::filterAndUpdateScreenParameters(AudioParameter &parameters) {
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyScreenState),
-                    [&](const String8& onOrOff) -> status_t {
-                        std::optional<bool> isTurnedOn;
-                        if (onOrOff == AudioParameter::valueOn) {
-                            isTurnedOn = true;
-                        } else if (onOrOff == AudioParameter::valueOff) {
-                            isTurnedOn = false;
-                        }
-                        if (!isTurnedOn.has_value()) {
-                            ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                    AudioParameter::keyScreenState, onOrOff.c_str());
-                            return BAD_VALUE;
-                        }
-                        return statusTFromBinderStatus(
-                                mModule->updateScreenState(isTurnedOn.value()));
-                    }));
+            parameters, String8(AudioParameter::keyScreenState),
+            [&, this](const String8& onOrOff) -> status_t {
+                std::optional<bool> isTurnedOn;
+                if (onOrOff == AudioParameter::valueOn) {
+                    isTurnedOn = true;
+                } else if (onOrOff == AudioParameter::valueOff) {
+                    isTurnedOn = false;
+                }
+                if (!isTurnedOn.has_value()) {
+                    AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                                AudioParameter::keyScreenState, onOrOff.c_str());
+                    return BAD_VALUE;
+                }
+                return statusTFromBinderStatus(mModule->updateScreenState(isTurnedOn.value()));
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
-                    parameters, String8(AudioParameter::keyScreenRotation),
-            [&](int rotationDegrees) -> status_t {
+            parameters, String8(AudioParameter::keyScreenRotation),
+            [&, this](int rotationDegrees) -> status_t {
                 IModule::ScreenRotation rotation;
                 switch (rotationDegrees) {
-                    case 0: rotation = IModule::ScreenRotation::DEG_0; break;
-                    case 90: rotation = IModule::ScreenRotation::DEG_90; break;
-                    case 180: rotation = IModule::ScreenRotation::DEG_180; break;
-                    case 270: rotation = IModule::ScreenRotation::DEG_270; break;
+                    case 0:
+                        rotation = IModule::ScreenRotation::DEG_0;
+                        break;
+                    case 90:
+                        rotation = IModule::ScreenRotation::DEG_90;
+                        break;
+                    case 180:
+                        rotation = IModule::ScreenRotation::DEG_180;
+                        break;
+                    case 270:
+                        rotation = IModule::ScreenRotation::DEG_270;
+                        break;
                     default:
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value %d",
-                                AudioParameter::keyScreenRotation, rotationDegrees);
+                        AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value %d",
+                                    AudioParameter::keyScreenRotation, rotationDegrees);
                         return BAD_VALUE;
                 }
                 return statusTFromBinderStatus(mModule->updateScreenRotation(rotation));
@@ -1285,49 +1385,48 @@
     using TtyMode = ITelephony::TelecomConfig::TtyMode;
     ITelephony::TelecomConfig telConfig;
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyTtyMode),
-                    [&telConfig](const String8& mode) {
-                        if (mode == AudioParameter::valueTtyModeOff) {
-                            telConfig.ttyMode = TtyMode::OFF;
-                            return OK;
-                        } else if (mode == AudioParameter::valueTtyModeFull) {
-                            telConfig.ttyMode = TtyMode::FULL;
-                            return OK;
-                        } else if (mode == AudioParameter::valueTtyModeHco) {
-                            telConfig.ttyMode = TtyMode::HCO;
-                            return OK;
-                        } else if (mode == AudioParameter::valueTtyModeVco) {
-                            telConfig.ttyMode = TtyMode::VCO;
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyTtyMode, mode.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyTtyMode),
+            [&telConfig, this](const String8& mode) {
+                if (mode == AudioParameter::valueTtyModeOff) {
+                    telConfig.ttyMode = TtyMode::OFF;
+                    return OK;
+                } else if (mode == AudioParameter::valueTtyModeFull) {
+                    telConfig.ttyMode = TtyMode::FULL;
+                    return OK;
+                } else if (mode == AudioParameter::valueTtyModeHco) {
+                    telConfig.ttyMode = TtyMode::HCO;
+                    return OK;
+                } else if (mode == AudioParameter::valueTtyModeVco) {
+                    telConfig.ttyMode = TtyMode::VCO;
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyTtyMode, mode.c_str());
+                return BAD_VALUE;
+            }));
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<String8>(
-                    parameters, String8(AudioParameter::keyHacSetting),
-                    [&telConfig](const String8& onOrOff) {
-                        if (onOrOff == AudioParameter::valueHacOn) {
-                            telConfig.isHacEnabled = Boolean{ .value = true };
-                            return OK;
-                        } else if (onOrOff == AudioParameter::valueHacOff) {
-                            telConfig.isHacEnabled = Boolean{ .value = false };
-                            return OK;
-                        }
-                        ALOGE("setParameters: parameter key \"%s\" has invalid value \"%s\"",
-                                AudioParameter::keyHacSetting, onOrOff.c_str());
-                        return BAD_VALUE;
-                    }));
+            parameters, String8(AudioParameter::keyHacSetting),
+            [&telConfig, this](const String8& onOrOff) {
+                if (onOrOff == AudioParameter::valueHacOn) {
+                    telConfig.isHacEnabled = Boolean{.value = true};
+                    return OK;
+                } else if (onOrOff == AudioParameter::valueHacOff) {
+                    telConfig.isHacEnabled = Boolean{.value = false};
+                    return OK;
+                }
+                AUGMENT_LOG(E, "setParameters: parameter key \"%s\" has invalid value \"%s\"",
+                            AudioParameter::keyHacSetting, onOrOff.c_str());
+                return BAD_VALUE;
+            }));
     if (mTelephony != nullptr && telConfig != ITelephony::TelecomConfig{}) {
         ITelephony::TelecomConfig newTelConfig;
-        return statusTFromBinderStatus(
-                mTelephony->setTelecomConfig(telConfig, &newTelConfig));
+        return statusTFromBinderStatus(mTelephony->setTelecomConfig(telConfig, &newTelConfig));
     }
     return OK;
 }
 
 void DeviceHalAidl::clearCallbacks(void* cookie) {
-    std::lock_guard l(mLock);
+    std::lock_guard l(mCallbacksLock);
     mCallbacks.erase(cookie);
 }
 
@@ -1360,18 +1459,21 @@
     setCallbackImpl(cookie, &Callbacks::latency, cb);
 }
 
-template<class C>
+template <class C>
 sp<C> DeviceHalAidl::getCallbackImpl(void* cookie, wp<C> DeviceHalAidl::Callbacks::* field) {
-    std::lock_guard l(mLock);
-    if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
-        return ((it->second).*field).promote();
+    wp<C> result;
+    {
+        std::lock_guard l(mCallbacksLock);
+        if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
+            result = (it->second).*field;
+        }
     }
-    return nullptr;
+    return result.promote();
 }
 template<class C>
 void DeviceHalAidl::setCallbackImpl(
         void* cookie, wp<C> DeviceHalAidl::Callbacks::* field, const sp<C>& cb) {
-    std::lock_guard l(mLock);
+    std::lock_guard l(mCallbacksLock);
     if (auto it = mCallbacks.find(cookie); it != mCallbacks.end()) {
         (it->second).*field = cb;
     }
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index bcd495d..6ae6402 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -119,7 +119,9 @@
     // by releasing all references to the returned object.
     status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
                               audio_output_flags_t flags, struct audio_config* config,
-                              const char* address, sp<StreamOutHalInterface>* outStream) override;
+                              const char* address, sp<StreamOutHalInterface>* outStream,
+                              const std::vector<playback_track_metadata_v7_t>&
+                                                               sourceMetadata = {}) override;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
@@ -233,7 +235,6 @@
     // MicrophoneInfoProvider implementation
     MicrophoneInfoProvider::Info const* getMicrophoneInfo() override;
 
-    const std::string mInstance;
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
     const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
     const std::shared_ptr<::aidl::android::hardware::audio::core::ITelephony> mTelephony;
@@ -242,8 +243,11 @@
     const std::shared_ptr<::aidl::android::hardware::audio::core::IBluetoothLe> mBluetoothLe;
     const std::shared_ptr<::aidl::android::hardware::audio::core::sounddose::ISoundDose> mSoundDose;
 
+    std::mutex mCallbacksLock;
+    // Use 'mCallbacksLock' only to implement exclusive access to 'mCallbacks'. Never hold it
+    // while making any calls.
+    std::map<void*, Callbacks> mCallbacks GUARDED_BY(mCallbacksLock);
     std::mutex mLock;
-    std::map<void*, Callbacks> mCallbacks GUARDED_BY(mLock);
     std::set<audio_port_handle_t> mDeviceDisconnectionNotified GUARDED_BY(mLock);
     Hal2AidlMapper mMapper GUARDED_BY(mLock);
     LockedAccessor<Hal2AidlMapper> mMapperAccessor;
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index ea4258c..263ef96 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -259,7 +259,8 @@
         audio_output_flags_t flags,
         struct audio_config *config,
         const char *address,
-        sp<StreamOutHalInterface> *outStream) {
+        sp<StreamOutHalInterface> *outStream,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
@@ -273,6 +274,16 @@
         return status;
     }
 
+#if MAJOR_VERSION == 4
+    ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#else
+    ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#endif
+
+    RETURN_STATUS_IF_ERROR(CoreUtils::sourceMetadataFromHalV7(
+            sourceMetadata, true /*ignoreNonVendorTags*/,
+            &hidlMetadata));
+
 #if !(MAJOR_VERSION == 7 && MINOR_VERSION == 1)
     //TODO: b/193496180 use spatializer flag at audio HAL when available
     if ((flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
@@ -294,7 +305,7 @@
 #endif
             handle, hidlDevice, hidlConfig, hidlFlags,
 #if MAJOR_VERSION >= 4
-            {} /* metadata */,
+            hidlMetadata /* metadata */,
 #endif
             [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& result,
                     const AudioConfig& suggestedConfig) {
@@ -608,7 +619,14 @@
             result != NO_ERROR) {
         return result;
     }
-    return processReturn("setConnectedState", mDevice->setConnectedState(hidlAddress, connected));
+    Return<Result> ret = mDevice->setConnectedState(hidlAddress, connected);
+    if (ret.isOk() || ret == Result::NOT_SUPPORTED) {
+        // The framework is only interested in errors occurring due to connection state handling,
+        // so it can decide whether retrying is needed. If the HAL does not support this operation,
+        // it's not an error.
+        return NO_ERROR;
+    }
+    return processReturn("setConnectedState", ret);
 }
 
 error::Result<audio_hw_sync_t> DeviceHalHidl::getHwAvSync() {
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 1362dab..5f3e08c 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -73,7 +73,9 @@
     // by releasing all references to the returned object.
     status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
                               audio_output_flags_t flags, struct audio_config* config,
-                              const char* address, sp<StreamOutHalInterface>* outStream) override;
+                              const char* address, sp<StreamOutHalInterface>* outStream,
+                              const std::vector<playback_track_metadata_v7_t>&
+                                                                sourceMetadata = {}) override;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
index ff6126d..f719d97 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.cpp
@@ -29,6 +29,7 @@
 #include <system/audio_effects/effect_visualizer.h>
 
 #include <utils/Log.h>
+#include <Utils.h>
 
 #include "EffectConversionHelperAidl.h"
 #include "EffectProxy.h"
@@ -37,18 +38,20 @@
 namespace effect {
 
 using ::aidl::android::aidl_utils::statusTFromBinderStatus;
+using ::aidl::android::hardware::audio::common::getChannelCount;
 using ::aidl::android::hardware::audio::effect::CommandId;
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::Flags;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::Parameter;
 using ::aidl::android::hardware::audio::effect::State;
+using ::aidl::android::media::audio::common::AudioChannelLayout;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioMode;
 using ::aidl::android::media::audio::common::AudioSource;
-using ::android::hardware::EventFlag;
 using android::effect::utils::EffectParamReader;
 using android::effect::utils::EffectParamWriter;
+using android::hardware::EventFlag;
 
 using ::android::status_t;
 
@@ -519,5 +522,23 @@
     return OK;
 }
 
+size_t EffectConversionHelperAidl::getAudioChannelCount() const {
+    return getChannelCount(mCommon.input.base.channelMask,
+                           ~AudioChannelLayout::LAYOUT_HAPTIC_AB /* mask */);
+}
+
+size_t EffectConversionHelperAidl::getHapticChannelCount() const {
+    return getChannelCount(mCommon.input.base.channelMask,
+                           AudioChannelLayout::LAYOUT_HAPTIC_AB /* mask */);
+}
+
+size_t EffectConversionHelperAidl::getInputChannelCount() const {
+    return getChannelCount(mCommon.input.base.channelMask);
+}
+
+size_t EffectConversionHelperAidl::getOutputChannelCount() const {
+    return getChannelCount(mCommon.output.base.channelMask);
+}
+
 }  // namespace effect
 }  // namespace android
diff --git a/media/libaudiohal/impl/EffectConversionHelperAidl.h b/media/libaudiohal/impl/EffectConversionHelperAidl.h
index 29c5a83..e9e9fc2 100644
--- a/media/libaudiohal/impl/EffectConversionHelperAidl.h
+++ b/media/libaudiohal/impl/EffectConversionHelperAidl.h
@@ -49,6 +49,11 @@
     ::aidl::android::hardware::audio::effect::Descriptor getDescriptor() const;
     status_t reopen();
 
+    size_t getAudioChannelCount() const;
+    size_t getHapticChannelCount() const;
+    size_t getInputChannelCount() const;
+    size_t getOutputChannelCount() const;
+
     uint8_t mOutputAccessMode = EFFECT_BUFFER_ACCESS_WRITE;
 
   protected:
diff --git a/media/libaudiohal/impl/EffectHalAidl.cpp b/media/libaudiohal/impl/EffectHalAidl.cpp
index 87d24a5..9fdde49 100644
--- a/media/libaudiohal/impl/EffectHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectHalAidl.cpp
@@ -15,6 +15,7 @@
  */
 
 #include <cstddef>
+#include <cstring>
 #define LOG_TAG "EffectHalAidl"
 //#define LOG_NDEBUG 0
 
@@ -58,7 +59,9 @@
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IEffect;
 using ::aidl::android::hardware::audio::effect::IFactory;
+using ::aidl::android::hardware::audio::effect::kEventFlagDataMqNotEmpty;
 using ::aidl::android::hardware::audio::effect::kEventFlagDataMqUpdate;
+using ::aidl::android::hardware::audio::effect::kEventFlagNotEmpty;
 using ::aidl::android::hardware::audio::effect::kReopenSupportedVersion;
 using ::aidl::android::hardware::audio::effect::State;
 
@@ -72,7 +75,14 @@
       mEffect(effect),
       mSessionId(sessionId),
       mIoId(ioId),
-      mIsProxyEffect(isProxyEffect) {
+      mIsProxyEffect(isProxyEffect),
+      mHalVersion([factory]() {
+          int version = 0;
+          // use factory HAL version because effect can be an EffectProxy instance
+          return factory->getInterfaceVersion(&version).isOk() ? version : 0;
+      }()),
+      mEventFlagDataMqNotEmpty(mHalVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty
+                                                                      : kEventFlagNotEmpty) {
     assert(mFactory != nullptr);
     assert(mEffect != nullptr);
     createAidlConversion(effect, sessionId, ioId, desc);
@@ -126,6 +136,7 @@
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidHapticGenerator()) {
         mConversion = std::make_unique<android::effect::AidlConversionHapticGenerator>(
                 effect, sessionId, ioId, desc, mIsProxyEffect);
+        mIsHapticGenerator = true;
     } else if (typeUuid ==
                ::aidl::android::hardware::audio::effect::getEffectTypeUuidLoudnessEnhancer()) {
         mConversion = std::make_unique<android::effect::AidlConversionLoudnessEnhancer>(
@@ -155,6 +166,7 @@
         mConversion = std::make_unique<android::effect::AidlConversionVendorExtension>(
                 effect, sessionId, ioId, desc, mIsProxyEffect);
     }
+    mEffectName = mConversion->getDescriptor().common.name;
     return OK;
 }
 
@@ -170,92 +182,165 @@
 
 // write to input FMQ here, wait for statusMQ STATUS_OK, and read from output FMQ
 status_t EffectHalAidl::process() {
-    const std::string effectName = mConversion->getDescriptor().common.name;
     State state = State::INIT;
     if (mConversion->isBypassing() || !mEffect->getState(&state).isOk() ||
         state != State::PROCESSING) {
-        ALOGI("%s skipping %s process because it's %s", __func__, effectName.c_str(),
+        ALOGI("%s skipping process because it's %s", mEffectName.c_str(),
               mConversion->isBypassing()
                       ? "bypassing"
                       : aidl::android::hardware::audio::effect::toString(state).c_str());
         return -ENODATA;
     }
 
-    // check if the DataMq needs any update, timeout at 1ns to avoid being blocked
-    auto efGroup = mConversion->getEventFlagGroup();
+    const std::shared_ptr<android::hardware::EventFlag> efGroup = mConversion->getEventFlagGroup();
     if (!efGroup) {
-        ALOGE("%s invalid efGroup", __func__);
+        ALOGE("%s invalid efGroup", mEffectName.c_str());
         return INVALID_OPERATION;
     }
 
-    // use IFactory HAL version because IEffect can be an EffectProxy instance
-    static const int halVersion = [&]() {
-        int version = 0;
-        return mFactory->getInterfaceVersion(&version).isOk() ? version : 0;
-    }();
+    // reopen if halVersion >= kReopenSupportedVersion and receive kEventFlagDataMqUpdate
+    RETURN_STATUS_IF_ERROR(maybeReopen(efGroup));
+    const size_t samplesWritten = writeToHalInputFmqAndSignal(efGroup);
+    if (0 == samplesWritten) {
+        return INVALID_OPERATION;
+    }
 
-    if (uint32_t efState = 0; halVersion >= kReopenSupportedVersion &&
-                              ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState,
+    RETURN_STATUS_IF_ERROR(waitHalStatusFmq(samplesWritten));
+    RETURN_STATUS_IF_ERROR(readFromHalOutputFmq(samplesWritten));
+    return OK;
+}
+
+status_t EffectHalAidl::maybeReopen(
+        const std::shared_ptr<android::hardware::EventFlag>& efGroup) const {
+    if (mHalVersion < kReopenSupportedVersion) {
+        return OK;
+    }
+
+    // check if the DataMq needs any update, timeout at 1ns to avoid being blocked
+    if (uint32_t efState = 0; ::android::OK == efGroup->wait(kEventFlagDataMqUpdate, &efState,
                                                              1 /* ns */, true /* retry */) &&
                               efState & kEventFlagDataMqUpdate) {
-        ALOGV("%s %s V%d receive dataMQUpdate eventFlag from HAL", __func__, effectName.c_str(),
-              halVersion);
-        mConversion->reopen();
+        ALOGD("%s V%d receive dataMQUpdate eventFlag from HAL", mEffectName.c_str(), mHalVersion);
+        return mConversion->reopen();
     }
-    auto statusQ = mConversion->getStatusMQ();
-    auto inputQ = mConversion->getInputMQ();
-    auto outputQ = mConversion->getOutputMQ();
-    if (!statusQ || !statusQ->isValid() || !inputQ || !inputQ->isValid() || !outputQ ||
-        !outputQ->isValid()) {
-        ALOGE("%s invalid FMQ [Status %d I %d O %d]", __func__, statusQ ? statusQ->isValid() : 0,
-              inputQ ? inputQ->isValid() : 0, outputQ ? outputQ->isValid() : 0);
-        return INVALID_OPERATION;
+    return OK;
+}
+
+size_t EffectHalAidl::writeToHalInputFmqAndSignal(
+        const std::shared_ptr<android::hardware::EventFlag>& efGroup) const {
+    const auto inputQ = mConversion->getInputMQ();
+    if (!inputQ || !inputQ->isValid()) {
+        ALOGE("%s invalid input FMQ", mEffectName.c_str());
+        return 0;
     }
 
-    size_t available = inputQ->availableToWrite();
-    size_t floatsToWrite = std::min(available, mInBuffer->getSize() / sizeof(float));
-    if (floatsToWrite == 0) {
-        ALOGE("%s not able to write, floats in buffer %zu, space in FMQ %zu", __func__,
-              mInBuffer->getSize() / sizeof(float), available);
+    const size_t fmqSpaceSamples = inputQ->availableToWrite();
+    const size_t samplesInBuffer =
+            mInBuffer->audioBuffer()->frameCount * mConversion->getInputChannelCount();
+    const size_t samplesToWrite = std::min(fmqSpaceSamples, samplesInBuffer);
+    if (samplesToWrite == 0) {
+        ALOGE("%s not able to write, samplesInBuffer %zu, fmqSpaceSamples %zu", mEffectName.c_str(),
+              samplesInBuffer, fmqSpaceSamples);
+        return 0;
+    }
+
+    const float* const inputRawBuffer = static_cast<const float*>(mInBuffer->audioBuffer()->f32);
+    if (!inputQ->write(inputRawBuffer, samplesToWrite)) {
+        ALOGE("%s failed to write %zu samples to inputQ [avail %zu]", mEffectName.c_str(),
+              samplesToWrite, inputQ->availableToWrite());
+        return 0;
+    }
+
+    efGroup->wake(mEventFlagDataMqNotEmpty);
+    return samplesToWrite;
+}
+
+void EffectHalAidl::writeHapticGeneratorData(size_t totalSamples, float* const outputRawBuffer,
+                                             float* const fmqOutputBuffer) const {
+    const auto audioChNum = mConversion->getAudioChannelCount();
+    const auto audioSamples =
+            totalSamples * audioChNum / (audioChNum + mConversion->getHapticChannelCount());
+
+    static constexpr float kHalFloatSampleLimit = 2.0f;
+    // for HapticGenerator, the input data buffer will be updated
+    float* const inputRawBuffer = static_cast<float*>(mInBuffer->audioBuffer()->f32);
+    // accumulate or copy input to output; the haptic samples remain all zero
+    if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+        accumulate_float(outputRawBuffer, inputRawBuffer, audioSamples);
+    } else {
+        memcpy_to_float_from_float_with_clamping(outputRawBuffer, inputRawBuffer, audioSamples,
+                                                 kHalFloatSampleLimit);
+    }
+    // append the haptic sample at the end of input audio samples
+    memcpy_to_float_from_float_with_clamping(inputRawBuffer + audioSamples,
+                                             fmqOutputBuffer + audioSamples,
+                                             totalSamples - audioSamples, kHalFloatSampleLimit);
+}
+
+status_t EffectHalAidl::waitHalStatusFmq(size_t samplesWritten) const {
+    const auto statusQ = mConversion->getStatusMQ();
+    if (const bool statusValid = statusQ && statusQ->isValid(); !statusValid) {
+        ALOGE("%s statusFMQ %s", mEffectName.c_str(), statusValid ? "valid" : "invalid");
         return INVALID_OPERATION;
     }
-    if (!mInBuffer->audioBuffer() ||
-        !inputQ->write((float*)mInBuffer->audioBuffer()->f32, floatsToWrite)) {
-        ALOGE("%s failed to write %zu floats from audiobuffer %p to inputQ [avail %zu]", __func__,
-              floatsToWrite, mInBuffer->audioBuffer(), inputQ->availableToWrite());
-        return INVALID_OPERATION;
-    }
-    efGroup->wake(aidl::android::hardware::audio::effect::kEventFlagNotEmpty);
 
     IEffect::Status retStatus{};
-    if (!statusQ->readBlocking(&retStatus, 1) || retStatus.status != OK ||
-        (size_t)retStatus.fmqConsumed != floatsToWrite || retStatus.fmqProduced == 0) {
-        ALOGE("%s read status failed: %s", __func__, retStatus.toString().c_str());
+    if (!statusQ->readBlocking(&retStatus, 1)) {
+        ALOGE("%s V%d read status from status FMQ failed", mEffectName.c_str(), mHalVersion);
+        return INVALID_OPERATION;
+    }
+    if (retStatus.status != OK || (size_t)retStatus.fmqConsumed != samplesWritten ||
+        retStatus.fmqProduced == 0) {
+        ALOGE("%s read status failed: %s, FMQ consumed %d (of %zu) produced %d",
+              mEffectName.c_str(), retStatus.toString().c_str(), retStatus.fmqConsumed,
+              samplesWritten, retStatus.fmqProduced);
         return INVALID_OPERATION;
     }
 
-    available = outputQ->availableToRead();
-    size_t floatsToRead = std::min(available, mOutBuffer->getSize() / sizeof(float));
-    if (floatsToRead == 0) {
-        ALOGE("%s not able to read, buffer space %zu, floats in FMQ %zu", __func__,
-              mOutBuffer->getSize() / sizeof(float), available);
+    return OK;
+}
+
+status_t EffectHalAidl::readFromHalOutputFmq(size_t samplesWritten) const {
+    const auto outputQ = mConversion->getOutputMQ();
+    if (const bool outputValid = outputQ && outputQ->isValid(); !outputValid) {
+        ALOGE("%s outputFMQ %s", mEffectName.c_str(), outputValid ? "valid" : "invalid");
         return INVALID_OPERATION;
     }
 
-    float *outputRawBuffer = mOutBuffer->audioBuffer()->f32;
+    const size_t fmqProducedSamples = outputQ->availableToRead();
+    const size_t bufferSpaceSamples =
+            mOutBuffer->audioBuffer()->frameCount * mConversion->getOutputChannelCount();
+    const size_t samplesToRead = std::min(fmqProducedSamples, bufferSpaceSamples);
+    if (samplesToRead == 0) {
+        ALOGE("%s unable to read, bufferSpace %zu, fmqProduced %zu samplesWritten %zu",
+              mEffectName.c_str(), bufferSpaceSamples, fmqProducedSamples, samplesWritten);
+        return INVALID_OPERATION;
+    }
+
+    float* const outputRawBuffer = static_cast<float*>(mOutBuffer->audioBuffer()->f32);
+    float* fmqOutputBuffer = outputRawBuffer;
     std::vector<float> tempBuffer;
-    if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-        tempBuffer.resize(floatsToRead);
-        outputRawBuffer = tempBuffer.data();
+    // keep original data in the output buffer for accumulate mode or HapticGenerator effect
+    if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE || mIsHapticGenerator) {
+        tempBuffer.resize(samplesToRead, 0);
+        fmqOutputBuffer = tempBuffer.data();
     }
     // always read floating point data for AIDL
-    if (!outputQ->read(outputRawBuffer, floatsToRead)) {
-        ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", __func__, floatsToRead,
-              mOutBuffer->audioBuffer());
+    if (!outputQ->read(fmqOutputBuffer, samplesToRead)) {
+        ALOGE("%s failed to read %zu from outputQ to audioBuffer %p", mEffectName.c_str(),
+              samplesToRead, fmqOutputBuffer);
         return INVALID_OPERATION;
     }
-    if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
-        accumulate_float(mOutBuffer->audioBuffer()->f32, outputRawBuffer, floatsToRead);
+
+    // HapticGenerator needs special handling: the generated haptic samples must be appended to
+    // the end of the audio samples. The HAL passes the haptic data back in the output FMQ at
+    // the same offset as in the input buffer, so here we skip the audio samples in the output
+    // FMQ and append the haptic samples to the end of the input buffer.
+    if (mIsHapticGenerator) {
+        assert(samplesToRead == samplesWritten);
+        writeHapticGeneratorData(samplesToRead, outputRawBuffer, fmqOutputBuffer);
+    } else if (mConversion->mOutputAccessMode == EFFECT_BUFFER_ACCESS_ACCUMULATE) {
+        accumulate_float(outputRawBuffer, fmqOutputBuffer, samplesToRead);
     }
 
     return OK;
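
The audio/haptic split in writeHapticGeneratorData() above is plain channel-count arithmetic. A
standalone sketch of that computation, not part of the patch and using illustrative names:

    #include <cassert>
    #include <cstddef>

    // Number of audio samples in an interleaved block that also carries haptic channels,
    // mirroring totalSamples * audioChNum / (audioChNum + hapticChannelCount) above.
    static size_t audioSamplesOf(size_t totalSamples, size_t audioChannels, size_t hapticChannels) {
        assert(audioChannels + hapticChannels > 0);
        return totalSamples * audioChannels / (audioChannels + hapticChannels);
    }

    int main() {
        // 480 frames of stereo audio plus one haptic channel: 1440 samples in total,
        // 960 of them audio; the remaining 480 are haptic.
        assert(audioSamplesOf(480 * 3, 2, 1) == 960);
        return 0;
    }
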
diff --git a/media/libaudiohal/impl/EffectHalAidl.h b/media/libaudiohal/impl/EffectHalAidl.h
index bbcb7e2..a775337 100644
--- a/media/libaudiohal/impl/EffectHalAidl.h
+++ b/media/libaudiohal/impl/EffectHalAidl.h
@@ -73,6 +73,12 @@
     const int32_t mSessionId;
     const int32_t mIoId;
     const bool mIsProxyEffect;
+    const int32_t mHalVersion;
+    // Audio effect HAL v2+ switches the flag to kEventFlagDataMqNotEmpty to avoid conflicts with
+    // kEventFlagNotEmpty
+    const uint32_t mEventFlagDataMqNotEmpty;
+    bool mIsHapticGenerator = false;
+    std::string mEffectName;
 
     std::unique_ptr<EffectConversionHelperAidl> mConversion;
 
@@ -92,6 +98,14 @@
     bool setEffectReverse(bool reverse);
     bool needUpdateReturnParam(uint32_t cmdCode);
 
+    status_t maybeReopen(const std::shared_ptr<android::hardware::EventFlag>& efGroup) const;
+    void writeHapticGeneratorData(size_t totalSamples, float* const outputRawBuffer,
+                                  float* const fmqOutputBuffer) const;
+    size_t writeToHalInputFmqAndSignal(
+            const std::shared_ptr<android::hardware::EventFlag>& efGroup) const;
+    status_t waitHalStatusFmq(size_t samplesWritten) const;
+    status_t readFromHalOutputFmq(size_t samplesWritten) const;
+
     // The destructor automatically releases the effect.
     virtual ~EffectHalAidl();
 };
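
The two new const members work as a pair: mHalVersion is captured once at construction and the
event flag is derived from it. A minimal sketch of that initializer-list pattern, with a
hypothetical class and placeholder constant values rather than the real AIDL definitions:

    #include <cstdint>
    #include <functional>

    namespace {
    constexpr int32_t kReopenSupportedVersion = 2;      // placeholder value for illustration
    constexpr uint32_t kEventFlagNotEmpty = 1u << 0;    // placeholder bits, not the real constants
    constexpr uint32_t kEventFlagDataMqNotEmpty = 1u << 10;
    }  // namespace

    class VersionedFlagHolder {
      public:
        explicit VersionedFlagHolder(const std::function<int32_t()>& queryHalVersion)
            // query the version once via an immediately invoked lambda, then derive the flag
            : mHalVersion([&]() { return queryHalVersion ? queryHalVersion() : 0; }()),
              mEventFlagDataMqNotEmpty(mHalVersion >= kReopenSupportedVersion
                                               ? kEventFlagDataMqNotEmpty
                                               : kEventFlagNotEmpty) {}

        uint32_t dataMqNotEmptyFlag() const { return mEventFlagDataMqNotEmpty; }

      private:
        const int32_t mHalVersion;
        const uint32_t mEventFlagDataMqNotEmpty;
    };

    int main() {
        const VersionedFlagHolder holder([] { return 3; });  // pretend the factory reports V3
        return holder.dataMqNotEmptyFlag() == kEventFlagDataMqNotEmpty ? 0 : 1;
    }
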
diff --git a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
index 64cc7ed..2753906 100644
--- a/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
+++ b/media/libaudiohal/impl/EffectsFactoryHalAidl.cpp
@@ -42,6 +42,8 @@
 using ::aidl::android::hardware::audio::effect::Descriptor;
 using ::aidl::android::hardware::audio::effect::IFactory;
 using ::aidl::android::hardware::audio::effect::Processing;
+using ::aidl::android::media::audio::common::AudioDevice;
+using ::aidl::android::media::audio::common::AudioDeviceAddress;
 using ::aidl::android::media::audio::common::AudioSource;
 using ::aidl::android::media::audio::common::AudioStreamType;
 using ::aidl::android::media::audio::common::AudioUuid;
@@ -174,9 +176,6 @@
     if (uuid == nullptr || effect == nullptr) {
         return BAD_VALUE;
     }
-    if (sessionId == AUDIO_SESSION_DEVICE && ioId == AUDIO_IO_HANDLE_NONE) {
-        return INVALID_OPERATION;
-    }
     ALOGV("%s session %d ioId %d", __func__, sessionId, ioId);
 
     AudioUuid aidlUuid =
@@ -284,7 +283,8 @@
 
     auto getConfigProcessingWithAidlProcessing =
             [&](const auto& aidlProcess, std::vector<effectsConfig::InputStream>& preprocess,
-                std::vector<effectsConfig::OutputStream>& postprocess) {
+                std::vector<effectsConfig::OutputStream>& postprocess,
+                std::vector<effectsConfig::DeviceEffects>& deviceprocess) {
                 if (aidlProcess.type.getTag() == Processing::Type::streamType) {
                     AudioStreamType aidlType =
                             aidlProcess.type.template get<Processing::Type::streamType>();
@@ -316,6 +316,25 @@
                     effectsConfig::InputStream stream = {.type = type.value(),
                                                          .effects = std::move(effects)};
                     preprocess.emplace_back(stream);
+                } else if (aidlProcess.type.getTag() == Processing::Type::device) {
+                    AudioDevice aidlDevice =
+                            aidlProcess.type.template get<Processing::Type::device>();
+                    std::vector<std::shared_ptr<const effectsConfig::Effect>> effects;
+                    std::transform(aidlProcess.ids.begin(), aidlProcess.ids.end(),
+                                   std::back_inserter(effects), getConfigEffectWithDescriptor);
+                    audio_devices_t type;
+                    char address[AUDIO_DEVICE_MAX_ADDRESS_LEN];
+                    status_t status = ::aidl::android::aidl2legacy_AudioDevice_audio_device(
+                            aidlDevice, &type, address);
+                    if (status != NO_ERROR) {
+                        ALOGE("%s device effect has invalid device type / address", __func__);
+                        return;
+                    }
+                    effectsConfig::DeviceEffects device = {
+                            {.type = type, .effects = std::move(effects)},
+                            .address = address,
+                    };
+                    deviceprocess.emplace_back(device);
                 }
             };
 
@@ -323,17 +342,21 @@
             [&]() -> std::shared_ptr<const effectsConfig::Processings> {
                 std::vector<effectsConfig::InputStream> preprocess;
                 std::vector<effectsConfig::OutputStream> postprocess;
+                std::vector<effectsConfig::DeviceEffects> deviceprocess;
                 for (const auto& processing : mAidlProcessings) {
-                    getConfigProcessingWithAidlProcessing(processing, preprocess, postprocess);
+                    getConfigProcessingWithAidlProcessing(processing, preprocess, postprocess,
+                                                          deviceprocess);
                 }
 
-                if (0 == preprocess.size() && 0 == postprocess.size()) {
+                if (0 == preprocess.size() && 0 == postprocess.size() &&
+                    0 == deviceprocess.size()) {
                     return nullptr;
                 }
 
                 return std::make_shared<const effectsConfig::Processings>(
                         effectsConfig::Processings({.preprocess = std::move(preprocess),
-                                                    .postprocess = std::move(postprocess)}));
+                                                    .postprocess = std::move(postprocess),
+                                                    .deviceprocess = std::move(deviceprocess)}));
             }());
 
     return processings;
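
The new Processing::Type::device branch above routes device-tagged entries into a third list next
to pre- and post-processing. A self-contained sketch of that tag dispatch using stand-in types
(std::variant instead of the AIDL union):

    #include <iostream>
    #include <string>
    #include <variant>
    #include <vector>

    // Stand-in tags for the AIDL Processing::Type union; the real code switches on getTag().
    struct StreamTypeTag { std::string streamType; };
    struct SourceTag { std::string source; };
    struct DeviceTag { std::string type; std::string address; };
    using ProcessingType = std::variant<StreamTypeTag, SourceTag, DeviceTag>;

    struct Processing { ProcessingType type; std::vector<std::string> effectNames; };

    int main() {
        const std::vector<Processing> aidlProcessings = {
            {StreamTypeTag{"music"}, {"equalizer"}},
            {DeviceTag{"speaker", "bottom"}, {"speaker_protection"}},
        };
        // source-tagged entries feed preprocessing, streamType-tagged entries feed postprocessing,
        // and device-tagged entries now get their own list, as in the hunk above.
        std::vector<Processing> preprocess, postprocess, deviceprocess;
        for (const auto& p : aidlProcessings) {
            if (std::holds_alternative<SourceTag>(p.type)) {
                preprocess.push_back(p);
            } else if (std::holds_alternative<StreamTypeTag>(p.type)) {
                postprocess.push_back(p);
            } else {
                deviceprocess.push_back(p);
            }
        }
        std::cout << "device effect chains: " << deviceprocess.size() << std::endl;
        return 0;
    }
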
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index 263e3e9..0cdf0f2 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -25,6 +25,7 @@
 #include <Utils.h>
 #include <utils/Log.h>
 
+#include "AidlUtils.h"
 #include "Hal2AidlMapper.h"
 
 using aidl::android::aidl_utils::statusTFromBinderStatus;
@@ -37,6 +38,7 @@
 using aidl::android::media::audio::common::AudioDeviceType;
 using aidl::android::media::audio::common::AudioFormatDescription;
 using aidl::android::media::audio::common::AudioFormatType;
+using aidl::android::media::audio::common::AudioGainConfig;
 using aidl::android::media::audio::common::AudioInputFlags;
 using aidl::android::media::audio::common::AudioIoFlags;
 using aidl::android::media::audio::common::AudioOutputFlags;
@@ -98,8 +100,7 @@
 }  // namespace
 
 Hal2AidlMapper::Hal2AidlMapper(const std::string& instance, const std::shared_ptr<IModule>& module)
-        : mInstance(instance), mModule(module) {
-}
+    : ConversionHelperAidl("Hal2AidlMapper", instance), mModule(module) {}
 
 void Hal2AidlMapper::addStream(
         const sp<StreamHalInterface>& stream, int32_t mixPortConfigId, int32_t patchId) {
@@ -136,9 +137,9 @@
     // 'sinks' will not be updated because 'setAudioPatch' only needs IDs. Here we log
     // the source arguments, where only the audio configuration and device specifications
     // are relevant.
-    ALOGD("%s: [disregard IDs] sources: %s, sinks: %s",
-            __func__, ::android::internal::ToString(sources).c_str(),
-            ::android::internal::ToString(sinks).c_str());
+    AUGMENT_LOG(D, "patch ID: %d, [disregard IDs] sources: %s, sinks: %s", *patchId,
+                ::android::internal::ToString(sources).c_str(),
+                ::android::internal::ToString(sinks).c_str());
     auto fillPortConfigs = [&](
             const std::vector<AudioPortConfig>& configs,
             const std::set<int32_t>& destinationPortIds,
@@ -151,18 +152,20 @@
                     // See b/315528763. Despite that the framework knows the actual format of
                     // the mix port, it still uses the original format. Luckily, there is
                     // the I/O handle which can be used to find the mix port.
-                    ALOGI("fillPortConfigs: retrying to find a mix port config with default "
-                            "configuration");
+                    AUGMENT_LOG(I,
+                                "fillPortConfigs: retrying to find a mix port config with"
+                                " default configuration");
                     if (auto it = findPortConfig(std::nullopt, s.flags,
                                     s.ext.get<AudioPortExt::mix>().handle);
                             it != mPortConfigs.end()) {
                         portConfig = it->second;
                     } else {
-                        const std::string flags = s.flags.has_value() ?
-                                s.flags->toString() : "<unspecified>";
-                        ALOGE("fillPortConfigs: existing port config for flags %s, handle %d "
-                                "not found in module %s", flags.c_str(),
-                                s.ext.get<AudioPortExt::mix>().handle, mInstance.c_str());
+                        const std::string flags =
+                                s.flags.has_value() ? s.flags->toString() : "<unspecified>";
+                        AUGMENT_LOG(E,
+                                    "fillPortConfigs: existing port config for flags %s, "
+                                    "handle %d not found",
+                                    flags.c_str(), s.ext.get<AudioPortExt::mix>().handle);
                         return BAD_VALUE;
                     }
                 } else {
@@ -170,8 +173,8 @@
                 }
             }
             LOG_ALWAYS_FATAL_IF(portConfig.id == 0,
-                    "fillPortConfigs: initial config: %s, port config: %s",
-                    s.toString().c_str(), portConfig.toString().c_str());
+                                "fillPortConfigs: initial config: %s, port config: %s",
+                                s.toString().c_str(), portConfig.toString().c_str());
             ids->push_back(portConfig.id);
             if (portIds != nullptr) {
                 portIds->insert(portConfig.portId);
@@ -181,7 +184,9 @@
     };
     // When looking up port configs, the destinationPortId is only used for mix ports.
     // Thus, we process device port configs first, and look up the destination port ID from them.
-    bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+    const bool sourceIsDevice = std::any_of(sources.begin(), sources.end(),
+            [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
+    const bool sinkIsDevice = std::any_of(sinks.begin(), sinks.end(),
             [](const auto& config) { return config.ext.getTag() == AudioPortExt::device; });
     const std::vector<AudioPortConfig>& devicePortConfigs =
             sourceIsDevice ? sources : sinks;
@@ -202,10 +207,29 @@
         existingPatchIt->second = patch;
     } else {
         bool created = false;
-        RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, &patch, &created));
+        // When the framework does not specify a patch ID, only the mix port config
+        // is used for finding an existing patch. That's because the framework assumes
+        // that there can only be one patch for an I/O thread.
+        PatchMatch match = sourceIsDevice && sinkIsDevice ?
+                MATCH_BOTH : (sourceIsDevice ? MATCH_SINKS : MATCH_SOURCES);
+        auto requestedPatch = patch;
+        RETURN_STATUS_IF_ERROR(findOrCreatePatch(patch, match,
+                                                 &patch, &created));
         // No cleanup of the patch is needed, it is managed by the framework.
         *patchId = patch.id;
         if (!created) {
+            requestedPatch.id = patch.id;
+            if (patch != requestedPatch) {
+                AUGMENT_LOG(I, "Updating transient patch. Current: %s, new: %s",
+                            patch.toString().c_str(), requestedPatch.toString().c_str());
+                // Since matching may be done by mix port only, update the patch if the device port
+                // config has changed.
+                patch = requestedPatch;
+                RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(
+                                mModule->setAudioPatch(patch, &patch)));
+                existingPatchIt = mPatches.find(patch.id);
+                existingPatchIt->second = patch;
+            }
             // The framework might have "created" a patch which already existed due to
             // stream creation. Need to release the ownership from the stream.
             for (auto& s : mStreams) {
@@ -230,7 +254,7 @@
     int32_t id = result->id;
     if (requestedPortConfig.id != 0 && requestedPortConfig.id != id) {
         LOG_ALWAYS_FATAL("%s: requested port config id %d changed to %d", __func__,
-                requestedPortConfig.id, id);
+                         requestedPortConfig.id, id);
     }
 
     auto [_, inserted] = mPortConfigs.insert_or_assign(id, *result);
@@ -250,8 +274,8 @@
         RETURN_STATUS_IF_ERROR(createOrUpdatePortConfig(suggestedOrAppliedPortConfig,
                         &appliedPortConfig, created));
         if (appliedPortConfig.id == 0) {
-            ALOGE("%s: module %s did not apply suggested config %s", __func__,
-                    mInstance.c_str(), suggestedOrAppliedPortConfig.toString().c_str());
+            AUGMENT_LOG(E, "did not apply suggested config %s",
+                        suggestedOrAppliedPortConfig.toString().c_str());
             return NO_INIT;
         }
         *result = appliedPortConfig;
@@ -267,25 +291,25 @@
     if (mDisconnectedPortReplacement.first == portId) {
         const auto& port = mDisconnectedPortReplacement.second;
         mPorts.insert(std::make_pair(port.id, port));
-        ALOGD("%s: disconnected port replacement: %s", __func__, port.toString().c_str());
+        AUGMENT_LOG(D, "disconnected port replacement: %s", port.toString().c_str());
         mDisconnectedPortReplacement = std::pair<int32_t, AudioPort>();
     }
     updateDynamicMixPorts();
 }
 
 status_t Hal2AidlMapper::findOrCreatePatch(
-        const AudioPatch& requestedPatch, AudioPatch* patch, bool* created) {
+        const AudioPatch& requestedPatch, PatchMatch match, AudioPatch* patch, bool* created) {
     std::set<int32_t> sourcePortConfigIds(requestedPatch.sourcePortConfigIds.begin(),
             requestedPatch.sourcePortConfigIds.end());
     std::set<int32_t> sinkPortConfigIds(requestedPatch.sinkPortConfigIds.begin(),
             requestedPatch.sinkPortConfigIds.end());
-    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, patch, created);
+    return findOrCreatePatch(sourcePortConfigIds, sinkPortConfigIds, match, patch, created);
 }
 
 status_t Hal2AidlMapper::findOrCreatePatch(
         const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
-        AudioPatch* patch, bool* created) {
-    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds);
+        PatchMatch match, AudioPatch* patch, bool* created) {
+    auto patchIt = findPatch(sourcePortConfigIds, sinkPortConfigIds, match);
     if (patchIt == mPatches.end()) {
         AudioPatch requestedPatch, appliedPatch;
         requestedPatch.sourcePortConfigIds.insert(requestedPatch.sourcePortConfigIds.end(),
@@ -304,13 +328,12 @@
 }
 
 status_t Hal2AidlMapper::findOrCreateDevicePortConfig(
-        const AudioDevice& device, const AudioConfig* config, AudioPortConfig* portConfig,
-        bool* created) {
+        const AudioDevice& device, const AudioConfig* config, const AudioGainConfig* gainConfig,
+        AudioPortConfig* portConfig, bool* created) {
     if (auto portConfigIt = findPortConfig(device); portConfigIt == mPortConfigs.end()) {
         auto portsIt = findPort(device);
         if (portsIt == mPorts.end()) {
-            ALOGE("%s: device port for device %s is not found in the module %s",
-                    __func__, device.toString().c_str(), mInstance.c_str());
+            AUGMENT_LOG(E, "device port for device %s is not found", device.toString().c_str());
             return BAD_VALUE;
         }
         AudioPortConfig requestedPortConfig;
@@ -318,12 +341,18 @@
         if (config != nullptr) {
             setPortConfigFromConfig(&requestedPortConfig, *config);
         }
+        if (gainConfig != nullptr) {
+            requestedPortConfig.gain = *gainConfig;
+        }
         return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
     } else {
         AudioPortConfig requestedPortConfig = portConfigIt->second;
         if (config != nullptr) {
             setPortConfigFromConfig(&requestedPortConfig, *config);
         }
+        if (gainConfig != nullptr) {
+            requestedPortConfig.gain = *gainConfig;
+        }
 
         if (requestedPortConfig != portConfigIt->second) {
             return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
@@ -339,16 +368,21 @@
         const AudioConfig& config, const std::optional<AudioIoFlags>& flags, int32_t ioHandle,
         AudioSource source, const std::set<int32_t>& destinationPortIds,
         AudioPortConfig* portConfig, bool* created) {
-    // These flags get removed one by one in this order when retrying port finding.
-    static const std::vector<AudioInputFlags> kOptionalInputFlags{
-        AudioInputFlags::FAST, AudioInputFlags::RAW, AudioInputFlags::VOIP_TX };
     if (auto portConfigIt = findPortConfig(config, flags, ioHandle);
             portConfigIt == mPortConfigs.end() && flags.has_value()) {
-        auto optionalInputFlagsIt = kOptionalInputFlags.begin();
+        // These input flags get removed one by one in this order when retrying port finding.
+        std::vector<AudioInputFlags> optionalInputFlags {
+            AudioInputFlags::FAST, AudioInputFlags::RAW, AudioInputFlags::VOIP_TX };
+        // For remote submix input, retry with direct input flag removed as the remote submix
+        // input is not expected to manipulate the contents of the audio stream.
+        if (mRemoteSubmixIn.has_value()) {
+            optionalInputFlags.push_back(AudioInputFlags::DIRECT);
+        }
+        auto optionalInputFlagsIt = optionalInputFlags.begin();
         AudioIoFlags matchFlags = flags.value();
         auto portsIt = findPort(config, matchFlags, destinationPortIds);
         while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::input
-                && optionalInputFlagsIt != kOptionalInputFlags.end()) {
+                && optionalInputFlagsIt != optionalInputFlags.end()) {
             if (!isBitPositionFlagSet(
                             matchFlags.get<AudioIoFlags::Tag::input>(), *optionalInputFlagsIt)) {
                 ++optionalInputFlagsIt;
@@ -357,15 +391,45 @@
             matchFlags.set<AudioIoFlags::Tag::input>(matchFlags.get<AudioIoFlags::Tag::input>() &
                     ~makeBitPositionFlagMask(*optionalInputFlagsIt++));
             portsIt = findPort(config, matchFlags, destinationPortIds);
-            ALOGI("%s: mix port for config %s, flags %s was not found in the module %s, "
-                    "retried with flags %s", __func__, config.toString().c_str(),
-                    flags.value().toString().c_str(), mInstance.c_str(),
-                    matchFlags.toString().c_str());
+            AUGMENT_LOG(I,
+                        "mix port for config %s, flags %s was not found"
+                        "retried with flags %s",
+                        config.toString().c_str(), flags.value().toString().c_str(),
+                        matchFlags.toString().c_str());
         }
+        // These output flags get removed one by one in this order when retrying port finding.
+        std::vector<AudioOutputFlags> optionalOutputFlags { };
+        // For remote submix output, retry with these output flags removed one by one:
+        // 1. DIRECT: remote submix outputs are expected not to manipulate the contents of the
+        //            audio stream.
+        // 2. IEC958_NONAUDIO: remote submix outputs are not connected to ALSA and do not require
+        //                     non audio signalling.
+        if (mRemoteSubmixOut.has_value()) {
+            optionalOutputFlags.push_back(AudioOutputFlags::DIRECT);
+            optionalOutputFlags.push_back(AudioOutputFlags::IEC958_NONAUDIO);
+        }
+        auto optionalOutputFlagsIt = optionalOutputFlags.begin();
+        matchFlags = flags.value();
+        while (portsIt == mPorts.end() && matchFlags.getTag() == AudioIoFlags::Tag::output
+                && optionalOutputFlagsIt != optionalOutputFlags.end()) {
+            if (!isBitPositionFlagSet(
+                            matchFlags.get<AudioIoFlags::Tag::output>(),*optionalOutputFlagsIt)) {
+                ++optionalOutputFlagsIt;
+                continue;
+            }
+            matchFlags.set<AudioIoFlags::Tag::output>(matchFlags.get<AudioIoFlags::Tag::output>() &
+                    ~makeBitPositionFlagMask(*optionalOutputFlagsIt++));
+            portsIt = findPort(config, matchFlags, destinationPortIds);
+            AUGMENT_LOG(I,
+                        "mix port for config %s, flags %s was not found"
+                        "retried with flags %s",
+                        config.toString().c_str(), flags.value().toString().c_str(),
+                        matchFlags.toString().c_str());
+        }
+
         if (portsIt == mPorts.end()) {
-            ALOGE("%s: mix port for config %s, flags %s is not found in the module %s",
-                    __func__, config.toString().c_str(), matchFlags.toString().c_str(),
-                    mInstance.c_str());
+            AUGMENT_LOG(E, "mix port for config %s, flags %s is not found",
+                        config.toString().c_str(), matchFlags.toString().c_str());
             return BAD_VALUE;
         }
         AudioPortConfig requestedPortConfig;
@@ -380,9 +444,10 @@
         }
         return createOrUpdatePortConfig(requestedPortConfig, portConfig, created);
     } else if (portConfigIt == mPortConfigs.end() && !flags.has_value()) {
-        ALOGW("%s: mix port config for %s, handle %d not found in the module %s, "
-                "and was not created as flags are not specified",
-                __func__, config.toString().c_str(), ioHandle, mInstance.c_str());
+        AUGMENT_LOG(W,
+                    "mix port config for %s, handle %d not found "
+                    "and was not created as flags are not specified",
+                    config.toString().c_str(), ioHandle);
         return BAD_VALUE;
     } else {
         AudioPortConfig requestedPortConfig = portConfigIt->second;
@@ -412,8 +477,8 @@
         if (const auto& p = requestedPortConfig;
                 !p.sampleRate.has_value() || !p.channelMask.has_value() ||
                 !p.format.has_value()) {
-            ALOGW("%s: provided mix port config is not fully specified: %s",
-                    __func__, p.toString().c_str());
+            AUGMENT_LOG(W, "provided mix port config is not fully specified: %s",
+                        p.toString().c_str());
             return BAD_VALUE;
         }
         AudioConfig config;
@@ -426,22 +491,29 @@
                 requestedPortConfig.ext.get<Tag::mix>().handle, source, destinationPortIds,
                 portConfig, created);
     } else if (requestedPortConfig.ext.getTag() == Tag::device) {
-        if (const auto& p = requestedPortConfig;
-                p.sampleRate.has_value() && p.channelMask.has_value() &&
-                p.format.has_value()) {
-            AudioConfig config;
-            setConfigFromPortConfig(&config, requestedPortConfig);
+        const auto& p = requestedPortConfig;
+        const bool hasAudioConfig =
+                p.sampleRate.has_value() && p.channelMask.has_value() && p.format.has_value();
+        const bool hasGainConfig = p.gain.has_value();
+        if (hasAudioConfig || hasGainConfig) {
+            AudioConfig config, *configPtr = nullptr;
+            if (hasAudioConfig) {
+                setConfigFromPortConfig(&config, requestedPortConfig);
+                configPtr = &config;
+            }
+            const AudioGainConfig* gainConfigPtr = nullptr;
+            if (hasGainConfig) gainConfigPtr = &(*(p.gain));
             return findOrCreateDevicePortConfig(
-                    requestedPortConfig.ext.get<Tag::device>().device, &config,
+                    requestedPortConfig.ext.get<Tag::device>().device, configPtr, gainConfigPtr,
                     portConfig, created);
         } else {
+            AUGMENT_LOG(D, "device port config does not have audio or gain config specified");
             return findOrCreateDevicePortConfig(
                     requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
-                    portConfig, created);
+                    nullptr /*gainConfig*/, portConfig, created);
         }
     }
-    ALOGW("%s: unsupported audio port config: %s",
-            __func__, requestedPortConfig.toString().c_str());
+    AUGMENT_LOG(W, "unsupported audio port config: %s", requestedPortConfig.toString().c_str());
     return BAD_VALUE;
 }
 
@@ -450,13 +522,13 @@
         *portConfig = it->second;
         return OK;
     }
-    ALOGE("%s: could not find a device port config for device %s",
-            __func__, device.toString().c_str());
+    AUGMENT_LOG(E, "could not find a device port config for device %s", device.toString().c_str());
     return BAD_VALUE;
 }
 
 Hal2AidlMapper::Patches::iterator Hal2AidlMapper::findPatch(
-        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds) {
+        const std::set<int32_t>& sourcePortConfigIds, const std::set<int32_t>& sinkPortConfigIds,
+        PatchMatch match) {
     return std::find_if(mPatches.begin(), mPatches.end(),
             [&](const auto& pair) {
                 const auto& p = pair.second;
@@ -464,7 +536,15 @@
                         p.sourcePortConfigIds.begin(), p.sourcePortConfigIds.end());
                 std::set<int32_t> patchSinks(
                         p.sinkPortConfigIds.begin(), p.sinkPortConfigIds.end());
-                return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks; });
+                switch (match) {
+                    case MATCH_SOURCES:
+                        return sourcePortConfigIds == patchSrcs;
+                    case MATCH_SINKS:
+                        return sinkPortConfigIds == patchSinks;
+                    case MATCH_BOTH:
+                        return sourcePortConfigIds == patchSrcs && sinkPortConfigIds == patchSinks;
+                }
+            });
 }
 
 Hal2AidlMapper::Ports::iterator Hal2AidlMapper::findPort(const AudioDevice& device) {
@@ -548,9 +628,10 @@
             }
             optionalFlags |= makeBitPositionFlagMask(*optionalOutputFlagsIt++);
             result = std::find_if(mPorts.begin(), mPorts.end(), matcher);
-            ALOGI("%s: port for config %s, flags %s was not found in the module %s, "
-                  "retried with excluding optional flags %#x", __func__, config.toString().c_str(),
-                    flags.toString().c_str(), mInstance.c_str(), optionalFlags);
+            AUGMENT_LOG(I,
+                        "port for config %s, flags %s was not found "
+                        "retried with excluding optional flags %#x",
+                        config.toString().c_str(), flags.toString().c_str(), optionalFlags);
         }
     }
     return result;
@@ -584,7 +665,7 @@
 status_t Hal2AidlMapper::getAudioMixPort(int32_t ioHandle, AudioPort* port) {
     auto it = findPortConfig(std::nullopt /*config*/, std::nullopt /*flags*/, ioHandle);
     if (it == mPortConfigs.end()) {
-        ALOGE("%s, cannot find mix port config for handle %u", __func__, ioHandle);
+        AUGMENT_LOG(E, "cannot find mix port config for handle %u", ioHandle);
         return BAD_VALUE;
     }
     return updateAudioPort(it->second.portId, port);
@@ -593,21 +674,18 @@
 status_t Hal2AidlMapper::getAudioPortCached(
         const ::aidl::android::media::audio::common::AudioDevice& device,
         ::aidl::android::media::audio::common::AudioPort* port) {
-
     if (auto portsIt = findPort(device); portsIt != mPorts.end()) {
         *port = portsIt->second;
         return OK;
     }
-    ALOGE("%s: device port for device %s is not found in the module %s",
-            __func__, device.toString().c_str(), mInstance.c_str());
+    AUGMENT_LOG(E, "device port for device %s is not found", device.toString().c_str());
     return BAD_VALUE;
 }
 
 status_t Hal2AidlMapper::initialize() {
     std::vector<AudioPort> ports;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->getAudioPorts(&ports)));
-    ALOGW_IF(ports.empty(), "%s: module %s returned an empty list of audio ports",
-            __func__, mInstance.c_str());
+    AUGMENT_LOG_IF(W, ports.empty(), "returned an empty list of audio ports");
     mDefaultInputPortId = mDefaultOutputPortId = -1;
     const int defaultDeviceFlag = 1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE;
     for (auto it = ports.begin(); it != ports.end(); ) {
@@ -640,8 +718,9 @@
         }
     }
     if (mRemoteSubmixIn.has_value() != mRemoteSubmixOut.has_value()) {
-        ALOGE("%s: The configuration only has input or output remote submix device, must have both",
-                __func__);
+        AUGMENT_LOG(E,
+                    "The configuration only has input or output remote submix device, "
+                    "must have both");
         mRemoteSubmixIn.reset();
         mRemoteSubmixOut.reset();
     }
@@ -649,7 +728,7 @@
         AudioPort connectedRSubmixIn = *mRemoteSubmixIn;
         connectedRSubmixIn.ext.get<AudioPortExt::Tag::device>().device.address =
                 AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS;
-        ALOGD("%s: connecting remote submix input", __func__);
+        AUGMENT_LOG(D, "connecting remote submix input");
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
                                 connectedRSubmixIn, &connectedRSubmixIn)));
         // The template port for the remote submix input couldn't be "default" because it is not
@@ -666,7 +745,7 @@
         AudioPort tempConnectedRSubmixOut = *mRemoteSubmixOut;
         tempConnectedRSubmixOut.ext.get<AudioPortExt::Tag::device>().device.address =
                 AUDIO_REMOTE_SUBMIX_DEVICE_ADDRESS;
-        ALOGD("%s: temporarily connecting and disconnecting remote submix output", __func__);
+        AUGMENT_LOG(D, "temporarily connecting and disconnecting remote submix output");
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
                                 tempConnectedRSubmixOut, &tempConnectedRSubmixOut)));
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->disconnectExternalDevice(
@@ -675,8 +754,8 @@
         ports.push_back(std::move(tempConnectedRSubmixOut));
     }
 
-    ALOGI("%s: module %s default port ids: input %d, output %d",
-            __func__, mInstance.c_str(), mDefaultInputPortId, mDefaultOutputPortId);
+    AUGMENT_LOG(I, "default port ids: input %d, output %d", mDefaultInputPortId,
+                mDefaultOutputPortId);
     std::transform(ports.begin(), ports.end(), std::inserter(mPorts, mPorts.end()),
             [](const auto& p) { return std::make_pair(p.id, p); });
     RETURN_STATUS_IF_ERROR(updateRoutes());
@@ -729,17 +808,17 @@
         int32_t ioHandle, const AudioDevice& device, const AudioIoFlags& flags,
         AudioSource source, Cleanups* cleanups, AudioConfig* config,
         AudioPortConfig* mixPortConfig, AudioPatch* patch) {
-    ALOGD("%p %s: handle %d, device %s, flags %s, source %s, config %s, mix port config %s",
-            this, __func__, ioHandle, device.toString().c_str(),
-            flags.toString().c_str(), toString(source).c_str(),
-            config->toString().c_str(), mixPortConfig->toString().c_str());
+    AUGMENT_LOG(D, "handle %d, device %s, flags %s, source %s, config %s, mixport config %s",
+                ioHandle, device.toString().c_str(), flags.toString().c_str(),
+                toString(source).c_str(), config->toString().c_str(),
+                mixPortConfig->toString().c_str());
     resetUnusedPatchesAndPortConfigs();
     const AudioConfig initialConfig = *config;
     // Find / create AudioPortConfigs for the device port and the mix port,
     // then find / create a patch between them, and open a stream on the mix port.
     AudioPortConfig devicePortConfig;
     bool created = false;
-    RETURN_STATUS_IF_ERROR(findOrCreateDevicePortConfig(device, config,
+    RETURN_STATUS_IF_ERROR(findOrCreateDevicePortConfig(device, config, nullptr /*gainConfig*/,
                     &devicePortConfig, &created));
     LOG_ALWAYS_FATAL_IF(devicePortConfig.id == 0);
     if (created) {
@@ -748,15 +827,16 @@
     status_t status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
             devicePortConfig.id, flags, source, initialConfig, cleanups, config,
             mixPortConfig, patch);
-    if (status != OK) {
+    if (status != OK && !(mRemoteSubmixOut.has_value() &&
+                initialConfig.base.format.type != AudioFormatType::PCM)) {
         // If using the client-provided config did not work out for establishing a mix port config
         // or patching, try with the device port config. Note that in general device port config and
         // mix port config are not required to be the same, however they must match if the HAL
         // module can't perform audio stream conversions.
         AudioConfig deviceConfig = initialConfig;
         if (setConfigFromPortConfig(&deviceConfig, devicePortConfig)->base != initialConfig.base) {
-            ALOGD("%s: retrying with device port config: %s", __func__,
-                    devicePortConfig.toString().c_str());
+            AUGMENT_LOG(D, "retrying with device port config: %s",
+                        devicePortConfig.toString().c_str());
             status = prepareToOpenStreamHelper(ioHandle, devicePortConfig.portId,
                     devicePortConfig.id, flags, source, initialConfig, cleanups,
                     &deviceConfig, mixPortConfig, patch);
@@ -800,8 +880,8 @@
         retryWithSuggestedConfig = true;
     }
     if (mixPortConfig->id == 0 && retryWithSuggestedConfig) {
-        ALOGD("%s: retrying to find/create a mix port config using config %s", __func__,
-                config->toString().c_str());
+        AUGMENT_LOG(D, "retrying to find/create a mix port config using config %s",
+                    config->toString().c_str());
         RETURN_STATUS_IF_ERROR(findOrCreateMixPortConfig(*config, flags, ioHandle, source,
                         std::set<int32_t>{devicePortId}, mixPortConfig, &created));
         if (created) {
@@ -810,16 +890,16 @@
         setConfigFromPortConfig(config, *mixPortConfig);
     }
     if (mixPortConfig->id == 0) {
-        ALOGD("%p %s: returning suggested config for the stream: %s", this, __func__,
-                config->toString().c_str());
+        AUGMENT_LOG(D, "returning suggested config for the stream: %s",
+                    config->toString().c_str());
         return OK;
     }
     if (isInput) {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {devicePortConfigId}, {mixPortConfig->id}, patch, &created));
+                        {devicePortConfigId}, {mixPortConfig->id}, MATCH_BOTH, patch, &created));
     } else {
         RETURN_STATUS_IF_ERROR(findOrCreatePatch(
-                        {mixPortConfig->id}, {devicePortConfigId}, patch, &created));
+                        {mixPortConfig->id}, {devicePortConfigId}, MATCH_BOTH, patch, &created));
     }
     if (created) {
         cleanups->add(&Hal2AidlMapper::resetPatch, patch->id);
@@ -849,9 +929,10 @@
 // Note: does not reset port configs.
 status_t Hal2AidlMapper::releaseAudioPatch(Patches::iterator it) {
     const int32_t patchId = it->first;
+    AUGMENT_LOG(D, "patchId %d", patchId);
     if (ndk::ScopedAStatus status = mModule->resetAudioPatch(patchId); !status.isOk()) {
-        ALOGE("%s: error while resetting patch %d: %s",
-                __func__, patchId, status.getDescription().c_str());
+        AUGMENT_LOG(E, "error while resetting patch %d: %s", patchId,
+                    status.getDescription().c_str());
         return statusTFromBinderStatus(status);
     }
     mPatches.erase(it);
@@ -870,7 +951,7 @@
         if (auto it = mPatches.find(patchId); it != mPatches.end()) {
             releaseAudioPatch(it);
         } else {
-            ALOGE("%s: patch id %d not found", __func__, patchId);
+            AUGMENT_LOG(E, "patch id %d not found", patchId);
             result = BAD_VALUE;
         }
     }
@@ -880,15 +961,17 @@
 
 void Hal2AidlMapper::resetPortConfig(int32_t portConfigId) {
     if (auto it = mPortConfigs.find(portConfigId); it != mPortConfigs.end()) {
+        AUGMENT_LOG(D, "%s", it->second.toString().c_str());
         if (ndk::ScopedAStatus status = mModule->resetAudioPortConfig(portConfigId);
                 !status.isOk()) {
-            ALOGE("%s: error while resetting port config %d: %s",
-                    __func__, portConfigId, status.getDescription().c_str());
+            AUGMENT_LOG(E, "error while resetting port config %d: %s", portConfigId,
+                        status.getDescription().c_str());
+            return;
         }
         mPortConfigs.erase(it);
         return;
     }
-    ALOGE("%s: port config id %d not found", __func__, portConfigId);
+    AUGMENT_LOG(E, "port config id %d not found", portConfigId);
 }
 
 void Hal2AidlMapper::resetUnusedPatchesAndPortConfigs() {
@@ -933,6 +1016,8 @@
 }
 
 status_t Hal2AidlMapper::setDevicePortConnectedState(const AudioPort& devicePort, bool connected) {
+    AUGMENT_LOG(D, "state %s, device %s", (connected ? "connected" : "disconnected"),
+                devicePort.toString().c_str());
     resetUnusedPatchesAndPortConfigs();
     if (connected) {
         AudioDevice matchDevice = devicePort.ext.get<AudioPortExt::device>().device;
@@ -963,8 +1048,7 @@
                 // port not found in every one of them.
                 return BAD_VALUE;
             } else {
-                ALOGD("%s: device port for device %s found in the module %s",
-                        __func__, matchDevice.toString().c_str(), mInstance.c_str());
+                AUGMENT_LOG(D, "device port for device %s found", matchDevice.toString().c_str());
             }
             templatePort = portsIt->second;
         }
@@ -975,10 +1059,9 @@
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->connectExternalDevice(
                                 connectedPort, &connectedPort)));
         const auto [it, inserted] = mPorts.insert(std::make_pair(connectedPort.id, connectedPort));
-        LOG_ALWAYS_FATAL_IF(!inserted,
-                "%s: module %s, duplicate port ID received from HAL: %s, existing port: %s",
-                __func__, mInstance.c_str(), connectedPort.toString().c_str(),
-                it->second.toString().c_str());
+        LOG_ALWAYS_FATAL_IF(
+                !inserted, "%s duplicate port ID received from HAL: %s, existing port: %s",
+                __func__, connectedPort.toString().c_str(), it->second.toString().c_str());
         mConnectedPorts.insert(connectedPort.id);
         if (erasePortAfterConnectionIt != mPorts.end()) {
             mPorts.erase(erasePortAfterConnectionIt);
@@ -991,8 +1074,7 @@
             // port not found in every one of them.
             return BAD_VALUE;
         } else {
-            ALOGD("%s: device port for device %s found in the module %s",
-                    __func__, matchDevice.toString().c_str(), mInstance.c_str());
+            AUGMENT_LOG(D, "device port for device %s found", matchDevice.toString().c_str());
         }
 
         // Disconnection of remote submix out with address "0" is a special case. We need to replace
@@ -1048,8 +1130,8 @@
             }
             portIt->second = *port;
         } else {
-            ALOGW("%s, port(%d) returned successfully from the HAL but not it is not cached",
-                  __func__, portId);
+            AUGMENT_LOG(W, "port(%d) returned successfully from the HAL but not it is not cached",
+                        portId);
         }
     }
     return status;
@@ -1058,8 +1140,7 @@
 status_t Hal2AidlMapper::updateRoutes() {
     RETURN_STATUS_IF_ERROR(
             statusTFromBinderStatus(mModule->getAudioRoutes(&mRoutes)));
-    ALOGW_IF(mRoutes.empty(), "%s: module %s returned an empty list of audio routes",
-            __func__, mInstance.c_str());
+    AUGMENT_LOG_IF(W, mRoutes.empty(), "returned an empty list of audio routes");
     if (mRemoteSubmixIn.has_value()) {
         // Remove mentions of the template remote submix input from routes.
         int32_t rSubmixInId = mRemoteSubmixIn->id;
@@ -1100,7 +1181,7 @@
             updateAudioPort(portId, &it->second);
         } else {
             // This must not happen
-            ALOGE("%s, cannot find port for id=%d", __func__, portId);
+            AUGMENT_LOG(E, "cannot find port for id=%d", portId);
         }
     }
 }
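
With the new PatchMatch mode, findOrCreatePatch() can match an existing patch by its sources only,
its sinks only, or both. A standalone sketch of that comparison, with a hypothetical helper and
plain int IDs instead of AIDL port config IDs:

    #include <cassert>
    #include <set>

    enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };

    // Hypothetical helper; the patch compares sets of port config IDs the same way in findPatch().
    static bool patchMatches(const std::set<int>& wantedSrcs, const std::set<int>& wantedSinks,
                             const std::set<int>& patchSrcs, const std::set<int>& patchSinks,
                             PatchMatch match) {
        switch (match) {
            case MATCH_SOURCES: return wantedSrcs == patchSrcs;
            case MATCH_SINKS: return wantedSinks == patchSinks;
            case MATCH_BOTH: return wantedSrcs == patchSrcs && wantedSinks == patchSinks;
        }
        return false;
    }

    int main() {
        // A mix({1}) -> device({42}) patch satisfies a request that only pins the mix port source.
        assert(patchMatches({1}, {7}, {1}, {42}, MATCH_SOURCES));
        assert(!patchMatches({1}, {7}, {1}, {42}, MATCH_BOTH));
        return 0;
    }
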
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index f302c23..2548752 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -26,6 +26,7 @@
 #include <media/AidlConversionUtil.h>
 
 #include "Cleanups.h"
+#include "ConversionHelperAidl.h"
 
 namespace android {
 
@@ -41,7 +42,7 @@
 // but still consider some of the outputs to be valid (for example, in 'open{Input|Output}Stream'),
 // 'Hal2AidlMapper' follows the Binder convention. It means that if a method returns an error,
 // the outputs may not be initialized at all and should not be considered by the caller.
-class Hal2AidlMapper {
+class Hal2AidlMapper : public ConversionHelperAidl {
   public:
     using Cleanups = Cleanups<Hal2AidlMapper>;
 
@@ -133,7 +134,8 @@
     using Streams = std::map<wp<StreamHalInterface>,
             std::pair<int32_t /*mix port config ID*/, int32_t /*patch ID*/>>;
 
-    const std::string mInstance;
+    enum PatchMatch { MATCH_SOURCES, MATCH_SINKS, MATCH_BOTH };
+
     const std::shared_ptr<::aidl::android::hardware::audio::core::IModule> mModule;
 
     bool audioDeviceMatches(const ::aidl::android::media::audio::common::AudioDevice& device,
@@ -150,15 +152,18 @@
             ::aidl::android::media::audio::common::AudioPortConfig* result, bool *created);
     void eraseConnectedPort(int32_t portId);
     status_t findOrCreatePatch(
-        const std::set<int32_t>& sourcePortConfigIds,
-        const std::set<int32_t>& sinkPortConfigIds,
+            const std::set<int32_t>& sourcePortConfigIds,
+            const std::set<int32_t>& sinkPortConfigIds,
+            PatchMatch match,
         ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
     status_t findOrCreatePatch(
         const ::aidl::android::hardware::audio::core::AudioPatch& requestedPatch,
+        PatchMatch match,
         ::aidl::android::hardware::audio::core::AudioPatch* patch, bool* created);
     status_t findOrCreateDevicePortConfig(
             const ::aidl::android::media::audio::common::AudioDevice& device,
             const ::aidl::android::media::audio::common::AudioConfig* config,
+            const ::aidl::android::media::audio::common::AudioGainConfig* gainConfig,
             ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
             bool* created);
     // If the resulting 'portConfig->id' is 0, that means the config was not created,
@@ -175,7 +180,7 @@
         const std::set<int32_t>& destinationPortIds,
         ::aidl::android::media::audio::common::AudioPortConfig* portConfig, bool* created);
     Patches::iterator findPatch(const std::set<int32_t>& sourcePortConfigIds,
-            const std::set<int32_t>& sinkPortConfigIds);
+            const std::set<int32_t>& sinkPortConfigIds, PatchMatch match);
     Ports::iterator findPort(const ::aidl::android::media::audio::common::AudioDevice& device);
     Ports::iterator findPort(
             const ::aidl::android::media::audio::common::AudioConfig& config,
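
findOrCreateMixPortConfig() above retries the port lookup after clearing optional I/O flags one at
a time (including DIRECT and IEC958_NONAUDIO for remote submix). A standalone sketch of that retry
loop, with stand-in flag bits and a fake lookup; only the retry shape matches the patch:

    #include <cstdint>
    #include <iostream>
    #include <vector>

    namespace {
    constexpr uint32_t FLAG_FAST = 1u << 0;
    constexpr uint32_t FLAG_RAW = 1u << 1;
    constexpr uint32_t FLAG_DIRECT = 1u << 2;

    bool findPort(uint32_t flags) {
        // Pretend the HAL module only exposes a port without FAST and RAW.
        return (flags & (FLAG_FAST | FLAG_RAW)) == 0;
    }
    }  // namespace

    int main() {
        const std::vector<uint32_t> optionalFlags = {FLAG_FAST, FLAG_RAW, FLAG_DIRECT};
        uint32_t matchFlags = FLAG_FAST | FLAG_RAW | FLAG_DIRECT;
        auto it = optionalFlags.begin();
        bool found = findPort(matchFlags);
        while (!found && it != optionalFlags.end()) {
            if ((matchFlags & *it) == 0) { ++it; continue; }  // this optional flag is not set
            matchFlags &= ~*it++;                             // strip one optional flag and retry
            found = findPort(matchFlags);
        }
        std::cout << (found ? "matched with flags " : "no match, flags ") << matchFlags << std::endl;
        return 0;
    }
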
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 46f4f13..d65701a 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -32,6 +32,7 @@
 #include <Utils.h>
 #include <utils/Log.h>
 
+#include "AidlUtils.h"
 #include "DeviceHalAidl.h"
 #include "EffectHalAidl.h"
 #include "StreamHalAidl.h"
@@ -58,6 +59,16 @@
 template<HalCommand::Tag cmd, typename T> HalCommand makeHalCommand(T data) {
     return HalCommand::make<cmd>(data);
 }
+
+template <typename MQTypeError>
+auto fmqErrorHandler(const char* mqName) {
+    return [m = std::string(mqName)](MQTypeError fmqError, std::string&& errorMessage) {
+        mediautils::TimeCheck::signalAudioHals();
+        LOG_ALWAYS_FATAL_IF(fmqError != MQTypeError::NONE, "%s: %s",
+                m.c_str(), errorMessage.c_str());
+    };
+}
+
 }  // namespace
 
 // static
@@ -74,23 +85,23 @@
     return streamCommon;
 }
 
-StreamHalAidl::StreamHalAidl(
-        std::string_view className, bool isInput, const audio_config& config,
-        int32_t nominalLatency, StreamContextAidl&& context,
-        const std::shared_ptr<IStreamCommon>& stream,
-        const std::shared_ptr<IHalAdapterVendorExtension>& vext)
-        : ConversionHelperAidl(className),
+StreamHalAidl::StreamHalAidl(std::string_view className, bool isInput, const audio_config& config,
+                             int32_t nominalLatency, StreamContextAidl&& context,
+                             const std::shared_ptr<IStreamCommon>& stream,
+                             const std::shared_ptr<IHalAdapterVendorExtension>& vext)
+    : ConversionHelperAidl(className, std::string(isInput ? "in" : "out") + "|ioHandle:" +
+            std::to_string(context.getIoHandle())),
           mIsInput(isInput),
           mConfig(configToBase(config)),
           mContext(std::move(context)),
           mStream(stream),
           mVendorExt(vext),
           mLastReplyLifeTimeNs(
-                  std::min(static_cast<size_t>(100),
-                          2 * mContext.getBufferDurationMs(mConfig.sample_rate))
+                  std::min(static_cast<size_t>(20),
+                           mContext.getBufferDurationMs(mConfig.sample_rate))
                   * NANOS_PER_MILLISECOND)
 {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     {
         std::lock_guard l(mLock);
         mLastReply.latencyMs = nominalLatency;
@@ -102,18 +113,29 @@
             StreamHalAidl::getAudioProperties(&config) == NO_ERROR) {
         mStreamPowerLog.init(config.sample_rate, config.channel_mask, config.format);
     }
+
+    if (mStream != nullptr) {
+        mContext.getCommandMQ()->setErrorHandler(
+                fmqErrorHandler<StreamContextAidl::CommandMQ::Error>("CommandMQ"));
+        mContext.getReplyMQ()->setErrorHandler(
+                fmqErrorHandler<StreamContextAidl::ReplyMQ::Error>("ReplyMQ"));
+        if (mContext.getDataMQ() != nullptr) {
+            mContext.getDataMQ()->setErrorHandler(
+                    fmqErrorHandler<StreamContextAidl::DataMQ::Error>("DataMQ"));
+        }
+    }
 }
 
 StreamHalAidl::~StreamHalAidl() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     if (mStream != nullptr) {
         ndk::ScopedAStatus status = mStream->close();
-        ALOGE_IF(!status.isOk(), "%s: status %s", __func__, status.getDescription().c_str());
+        AUGMENT_LOG_IF(E, !status.isOk(), "status %s", status.getDescription().c_str());
     }
 }
 
 status_t StreamHalAidl::getBufferSize(size_t *size) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     if (size == nullptr) {
         return BAD_VALUE;
     }
@@ -122,11 +144,12 @@
         return NO_INIT;
     }
     *size = mContext.getBufferSizeBytes();
+    AUGMENT_LOG(I, "size: %zu", *size);
     return OK;
 }
 
 status_t StreamHalAidl::getAudioProperties(audio_config_base_t *configBase) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     if (configBase == nullptr) {
         return BAD_VALUE;
     }
@@ -136,10 +159,11 @@
 }
 
 status_t StreamHalAidl::setParameters(const String8& kvPairs) {
+    AUGMENT_LOG(V);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     AudioParameter parameters(kvPairs);
-    ALOGD("%s: parameters: %s", __func__, parameters.toString().c_str());
+    AUGMENT_LOG(D, "parameters: %s", parameters.toString().c_str());
 
     (void)VALUE_OR_RETURN_STATUS(filterOutAndProcessParameter<int>(
                     parameters, String8(AudioParameter::keyStreamHwAvSync),
@@ -150,6 +174,7 @@
 }
 
 status_t StreamHalAidl::getParameters(const String8& keys __unused, String8 *values) {
+    AUGMENT_LOG(V);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (values == nullptr) {
@@ -161,7 +186,7 @@
 }
 
 status_t StreamHalAidl::getFrameSize(size_t *size) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     if (size == nullptr) {
         return BAD_VALUE;
     }
@@ -173,7 +198,7 @@
 }
 
 status_t StreamHalAidl::addEffect(sp<EffectHalInterface> effect) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (effect == nullptr) {
@@ -184,7 +209,7 @@
 }
 
 status_t StreamHalAidl::removeEffect(sp<EffectHalInterface> effect) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (effect == nullptr) {
@@ -195,27 +220,32 @@
 }
 
 status_t StreamHalAidl::standby() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     const auto state = getState();
     StreamDescriptor::Reply reply;
     switch (state) {
         case StreamDescriptor::State::ACTIVE:
+        case StreamDescriptor::State::DRAINING:
+        case StreamDescriptor::State::TRANSFERRING:
             RETURN_STATUS_IF_ERROR(pause(&reply));
-            if (reply.state != StreamDescriptor::State::PAUSED) {
-                ALOGE("%s: unexpected stream state: %s (expected PAUSED)",
-                        __func__, toString(reply.state).c_str());
+            if (reply.state != StreamDescriptor::State::PAUSED &&
+                    reply.state != StreamDescriptor::State::DRAIN_PAUSED &&
+                    reply.state != StreamDescriptor::State::TRANSFER_PAUSED) {
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected PAUSED)",
+                            toString(reply.state).c_str());
                 return INVALID_OPERATION;
             }
             FALLTHROUGH_INTENDED;
         case StreamDescriptor::State::PAUSED:
         case StreamDescriptor::State::DRAIN_PAUSED:
+        case StreamDescriptor::State::TRANSFER_PAUSED:
             if (mIsInput) return flush();
             RETURN_STATUS_IF_ERROR(flush(&reply));
             if (reply.state != StreamDescriptor::State::IDLE) {
-                ALOGE("%s: unexpected stream state: %s (expected IDLE)",
-                        __func__, toString(reply.state).c_str());
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE)",
+                            toString(reply.state).c_str());
                 return INVALID_OPERATION;
             }
             FALLTHROUGH_INTENDED;
@@ -223,22 +253,22 @@
             RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::standby>(),
                             &reply, true /*safeFromNonWorkerThread*/));
             if (reply.state != StreamDescriptor::State::STANDBY) {
-                ALOGE("%s: unexpected stream state: %s (expected STANDBY)",
-                        __func__, toString(reply.state).c_str());
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected STANDBY)",
+                            toString(reply.state).c_str());
                 return INVALID_OPERATION;
             }
             FALLTHROUGH_INTENDED;
         case StreamDescriptor::State::STANDBY:
             return OK;
         default:
-            ALOGE("%s: not supported from %s stream state %s",
-                    __func__, mIsInput ? "input" : "output", toString(state).c_str());
+            AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
+                        toString(state).c_str());
             return INVALID_OPERATION;
     }
 }
 
 status_t StreamHalAidl::dump(int fd, const Vector<String16>& args) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     Vector<String16> newArgs = args;
@@ -249,49 +279,101 @@
 }
 
 status_t StreamHalAidl::start() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    const auto state = getState();
-    StreamDescriptor::Reply reply;
-    if (state == StreamDescriptor::State::STANDBY) {
-        RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
-        return sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true);
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
     }
-
-    return INVALID_OPERATION;
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    switch (reply.state) {
+        case StreamDescriptor::State::STANDBY:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::IDLE) {
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE)",
+                            toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::IDLE:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
+                            toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            FALLTHROUGH_INTENDED;
+        case StreamDescriptor::State::ACTIVE:
+            return OK;
+        case StreamDescriptor::State::DRAINING:
+            RETURN_STATUS_IF_ERROR(
+                    sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply, true));
+            if (reply.state != StreamDescriptor::State::ACTIVE) {
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
+                            toString(reply.state).c_str());
+                return INVALID_OPERATION;
+            }
+            return OK;
+        default:
+            AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
+                        toString(reply.state).c_str());
+            return INVALID_OPERATION;
+    }
 }
 
 status_t StreamHalAidl::stop() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
+    TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return standby();
+    if (!mContext.isMmapped()) {
+        return BAD_VALUE;
+    }
+    StreamDescriptor::Reply reply;
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    if (const auto state = reply.state; state == StreamDescriptor::State::ACTIVE) {
+        return drain(false /*earlyNotify*/, nullptr);
+    } else if (state == StreamDescriptor::State::DRAINING) {
+        RETURN_STATUS_IF_ERROR(pause());
+        return flush();
+    } else if (state == StreamDescriptor::State::PAUSED) {
+        return flush();
+    } else if (state != StreamDescriptor::State::IDLE &&
+            state != StreamDescriptor::State::STANDBY) {
+        AUGMENT_LOG(E, "not supported from %s stream state %s", mIsInput ? "input" : "output",
+                    toString(state).c_str());
+        return INVALID_OPERATION;
+    }
+    return OK;
 }
 
 status_t StreamHalAidl::getLatency(uint32_t *latency) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
     *latency = std::clamp(std::max<int32_t>(0, reply.latencyMs), 1, 3000);
-    ALOGW_IF(reply.latencyMs != static_cast<int32_t>(*latency),
-             "Suspicious latency value reported by HAL: %d, clamped to %u", reply.latencyMs,
-             *latency);
+    AUGMENT_LOG_IF(W, reply.latencyMs != static_cast<int32_t>(*latency),
+                   "Suspicious latency value reported by HAL: %d, clamped to %u", reply.latencyMs,
+                   *latency);
     return OK;
 }
 
-status_t StreamHalAidl::getObservablePosition(int64_t *frames, int64_t *timestamp) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+status_t StreamHalAidl::getObservablePosition(int64_t* frames, int64_t* timestamp,
+        StatePositions* statePositions) {
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
-    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
+    RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply, statePositions));
     *frames = std::max<int64_t>(0, reply.observable.frames);
     *timestamp = std::max<int64_t>(0, reply.observable.timeNs);
     return OK;
 }
 
 status_t StreamHalAidl::getHardwarePosition(int64_t *frames, int64_t *timestamp) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
@@ -301,7 +383,7 @@
 }
 
 status_t StreamHalAidl::getXruns(int32_t *frames) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     if (!mStream) return NO_INIT;
     StreamDescriptor::Reply reply;
     RETURN_STATUS_IF_ERROR(updateCountersIfNeeded(&reply));
@@ -310,7 +392,7 @@
 }
 
 status_t StreamHalAidl::transfer(void *buffer, size_t bytes, size_t *transferred) {
-    ALOGV("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(V);
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!mStream || mContext.getDataMQ() == nullptr) return NO_INIT;
     mWorkerTid.store(gettid(), std::memory_order_release);
@@ -322,8 +404,8 @@
         StreamDescriptor::Reply reply;
         RETURN_STATUS_IF_ERROR(sendCommand(makeHalCommand<HalCommand::Tag::start>(), &reply));
         if (reply.state != StreamDescriptor::State::IDLE) {
-            ALOGE("%s: failed to get the stream out of standby, actual state: %s",
-                    __func__, toString(reply.state).c_str());
+            AUGMENT_LOG(E, "failed to get the stream out of standby, actual state: %s",
+                        toString(reply.state).c_str());
             return INVALID_OPERATION;
         }
     }
@@ -334,7 +416,7 @@
             StreamDescriptor::Command::make<StreamDescriptor::Command::Tag::burst>(bytes);
     if (!mIsInput) {
         if (!mContext.getDataMQ()->write(static_cast<const int8_t*>(buffer), bytes)) {
-            ALOGE("%s: failed to write %zu bytes to data MQ", __func__, bytes);
+            AUGMENT_LOG(E, "failed to write %zu bytes to data MQ", bytes);
             return NOT_ENOUGH_DATA;
         }
     }
@@ -347,7 +429,7 @@
                 __func__, *transferred, bytes);
         if (auto toRead = mContext.getDataMQ()->availableToRead();
                 toRead != 0 && !mContext.getDataMQ()->read(static_cast<int8_t*>(buffer), toRead)) {
-            ALOGE("%s: failed to read %zu bytes to data MQ", __func__, toRead);
+            AUGMENT_LOG(E, "failed to read %zu bytes to data MQ", toRead);
             return NOT_ENOUGH_DATA;
         }
     }
@@ -356,15 +438,23 @@
 }
 
 status_t StreamHalAidl::pause(StreamDescriptor::Reply* reply) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return sendCommand(makeHalCommand<HalCommand::Tag::pause>(), reply,
-            true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+
+    if (const auto state = getState(); isInPlayOrRecordState(state)) {
+        return sendCommand(
+                makeHalCommand<HalCommand::Tag::pause>(), reply,
+                true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
+    } else {
+        AUGMENT_LOG(D, "already stream in one of the PAUSED kind of states, current state: %s",
+                toString(state).c_str());
+        return OK;
+    }
 }
 
 status_t StreamHalAidl::resume(StreamDescriptor::Reply* reply) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (mIsInput) {
@@ -378,23 +468,30 @@
             RETURN_STATUS_IF_ERROR(
                     sendCommand(makeHalCommand<HalCommand::Tag::burst>(0), innerReply));
             if (innerReply->state != StreamDescriptor::State::ACTIVE) {
-                ALOGE("%s: unexpected stream state: %s (expected ACTIVE)",
-                        __func__, toString(innerReply->state).c_str());
+                AUGMENT_LOG(E, "unexpected stream state: %s (expected ACTIVE)",
+                            toString(innerReply->state).c_str());
                 return INVALID_OPERATION;
             }
             return OK;
-        } else if (state == StreamDescriptor::State::PAUSED) {
+        } else if (state == StreamDescriptor::State::PAUSED ||
+                   state == StreamDescriptor::State::TRANSFER_PAUSED ||
+                   state == StreamDescriptor::State::DRAIN_PAUSED) {
             return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
+        } else if (state == StreamDescriptor::State::ACTIVE ||
+                   state == StreamDescriptor::State::TRANSFERRING ||
+                   state == StreamDescriptor::State::DRAINING) {
+            AUGMENT_LOG(D, "already in stream state: %s", toString(state).c_str());
+            return OK;
         } else {
-            ALOGE("%s: unexpected stream state: %s (expected IDLE or PAUSED)",
-                        __func__, toString(state).c_str());
+            AUGMENT_LOG(E, "unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
+                        toString(state).c_str());
             return INVALID_OPERATION;
         }
     }
 }
 
 status_t StreamHalAidl::drain(bool earlyNotify, StreamDescriptor::Reply* reply) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     return sendCommand(makeHalCommand<HalCommand::Tag::drain>(
@@ -405,15 +502,26 @@
 }
 
 status_t StreamHalAidl::flush(StreamDescriptor::Reply* reply) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    return sendCommand(makeHalCommand<HalCommand::Tag::flush>(), reply,
-            true /*safeFromNonWorkerThread*/);  // The workers stops its I/O activity first.
+
+    if (const auto state = getState(); isInPausedState(state)) {
+        return sendCommand(
+                makeHalCommand<HalCommand::Tag::flush>(), reply,
+                true /*safeFromNonWorkerThread*/);  // The worker stops its I/O activity first.
+    } else if (isInPlayOrRecordState(state)) {
+        AUGMENT_LOG(E, "found stream in non-flushable state: %s", toString(state).c_str());
+        return INVALID_OPERATION;
+    } else {
+        AUGMENT_LOG(D, "already stream in one of the flushable state: current state: %s",
+                    toString(state).c_str());
+        return OK;
+    }
 }
 
 status_t StreamHalAidl::exit() {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     return statusTFromBinderStatus(mStream->prepareToClose());
@@ -426,7 +534,7 @@
         sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
                 nullptr, true /*safeFromNonWorkerThread */);
     } else {
-        ALOGW("%s: unexpected onTransferReady in the state %s", __func__, toString(state).c_str());
+        AUGMENT_LOG(W, "unexpected onTransferReady in the state %s", toString(state).c_str());
     }
 }
 
@@ -434,29 +542,26 @@
     if (auto state = getState(); state == StreamDescriptor::State::DRAINING) {
         // Retrieve the current state together with position counters unconditionally
         // to ensure that the state on our side gets updated.
-        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                nullptr, true /*safeFromNonWorkerThread */);
+        sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(), nullptr,
+                    true /*safeFromNonWorkerThread */);
+        // For compatibility with HIDL behavior, apply a "soft" position reset
+        // after receiving the "drain ready" callback.
+        std::lock_guard l(mLock);
+        mStatePositions.framesAtFlushOrDrain = mLastReply.observable.frames;
     } else {
-        ALOGW("%s: unexpected onDrainReady in the state %s", __func__, toString(state).c_str());
+        AUGMENT_LOG(W, "unexpected onDrainReady in the state %s", toString(state).c_str());
     }
 }
 
 void StreamHalAidl::onAsyncError() {
     std::lock_guard l(mLock);
-    if (mLastReply.state == StreamDescriptor::State::IDLE ||
-        mLastReply.state == StreamDescriptor::State::DRAINING ||
-        mLastReply.state == StreamDescriptor::State::TRANSFERRING) {
-        mLastReply.state = StreamDescriptor::State::ERROR;
-        ALOGW("%s: onError received", __func__);
-    } else {
-        ALOGW("%s: unexpected onError in the state %s", __func__,
-                toString(mLastReply.state).c_str());
-    }
+    AUGMENT_LOG(W, "received in the state %s", toString(mLastReply.state).c_str());
+    mLastReply.state = StreamDescriptor::State::ERROR;
 }
 
 status_t StreamHalAidl::createMmapBuffer(int32_t minSizeFrames __unused,
                                          struct audio_mmap_buffer_info *info) {
-    ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isMmapped()) {
@@ -502,9 +607,9 @@
 }
 
 status_t StreamHalAidl::sendCommand(
-        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+        const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
         ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
-        bool safeFromNonWorkerThread) {
+        bool safeFromNonWorkerThread, StatePositions* statePositions) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (!safeFromNonWorkerThread) {
         const pid_t workerTid = mWorkerTid.load(std::memory_order_acquire);
@@ -516,15 +621,14 @@
     {
         std::lock_guard l(mCommandReplyLock);
         if (!mContext.getCommandMQ()->writeBlocking(&command, 1)) {
-            ALOGE("%s: failed to write command %s to MQ", __func__, command.toString().c_str());
+            AUGMENT_LOG(E, "failed to write command %s to MQ", command.toString().c_str());
             return NOT_ENOUGH_DATA;
         }
         if (reply == nullptr) {
             reply = &localReply;
         }
         if (!mContext.getReplyMQ()->readBlocking(reply, 1)) {
-            ALOGE("%s: failed to read from reply MQ, command %s",
-                    __func__, command.toString().c_str());
+            AUGMENT_LOG(E, "failed to read from reply MQ, command %s", command.toString().c_str());
             return NOT_ENOUGH_DATA;
         }
         {
@@ -536,6 +640,23 @@
             }
             mLastReply = *reply;
             mLastReplyExpirationNs = uptimeNanos() + mLastReplyLifeTimeNs;
+            if (!mIsInput && reply->status == STATUS_OK) {
+                if (command.getTag() == StreamDescriptor::Command::standby &&
+                        reply->state == StreamDescriptor::State::STANDBY) {
+                    mStatePositions.framesAtStandby = reply->observable.frames;
+                } else if (command.getTag() == StreamDescriptor::Command::flush &&
+                           reply->state == StreamDescriptor::State::IDLE) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } else if (!mContext.isAsynchronous() &&
+                        command.getTag() == StreamDescriptor::Command::drain &&
+                        (reply->state == StreamDescriptor::State::IDLE ||
+                                reply->state == StreamDescriptor::State::DRAINING)) {
+                    mStatePositions.framesAtFlushOrDrain = reply->observable.frames;
+                } // for asynchronous drain, the frame count is saved in 'onAsyncDrainReady'
+            }
+            if (statePositions != nullptr) {
+                *statePositions = mStatePositions;
+            }
         }
     }
     switch (reply->status) {
@@ -544,14 +665,15 @@
         case STATUS_INVALID_OPERATION: return INVALID_OPERATION;
         case STATUS_NOT_ENOUGH_DATA: return NOT_ENOUGH_DATA;
         default:
-            ALOGE("%s: unexpected status %d returned for command %s",
-                    __func__, reply->status, command.toString().c_str());
+            AUGMENT_LOG(E, "unexpected status %d returned for command %s", reply->status,
+                        command.toString().c_str());
             return INVALID_OPERATION;
     }
 }
 
 status_t StreamHalAidl::updateCountersIfNeeded(
-        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply) {
+        ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply,
+        StatePositions* statePositions) {
     bool doUpdate = false;
     {
         std::lock_guard l(mLock);
@@ -561,10 +683,13 @@
         // Since updates are paced, it is OK to perform them from any thread, they should
         // not interfere with I/O operations of the worker.
         return sendCommand(makeHalCommand<HalCommand::Tag::getStatus>(),
-                reply, true /*safeFromNonWorkerThread */);
+                reply, true /*safeFromNonWorkerThread */, statePositions);
     } else if (reply != nullptr) {  // provide cached reply
         std::lock_guard l(mLock);
         *reply = mLastReply;
+        if (statePositions != nullptr) {
+            *statePositions = mStatePositions;
+        }
     }
     return OK;
 }
@@ -606,10 +731,10 @@
     if (!mStream) return NO_INIT;
 
     AudioParameter parameters(kvPairs);
-    ALOGD("%s: parameters: \"%s\"", __func__, parameters.toString().c_str());
+    AUGMENT_LOG(D, "parameters: \"%s\"", parameters.toString().c_str());
 
     if (status_t status = filterAndUpdateOffloadMetadata(parameters); status != OK) {
-        ALOGW("%s: filtering or updating offload metadata failed: %d", __func__, status);
+        AUGMENT_LOG(W, "filtering or updating offload metadata failed: %d", status);
     }
 
     return StreamHalAidl::setParameters(parameters.toString());
@@ -620,9 +745,10 @@
 }
 
 status_t StreamOutHalAidl::setVolume(float left, float right) {
+    AUGMENT_LOG(V, "left %f right %f", left, right);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
-    size_t channelCount = audio_channel_out_mask_from_count(mConfig.channel_mask);
+    size_t channelCount = audio_channel_count_from_out_mask(mConfig.channel_mask);
     if (channelCount == 0) channelCount = 2;
     std::vector<float> volumes(channelCount);
     if (channelCount == 1) {
@@ -651,27 +777,33 @@
     return transfer(const_cast<void*>(buffer), bytes, written);
 }
 
-status_t StreamOutHalAidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalAidl::getRenderPosition(uint64_t *dspFrames) {
     if (dspFrames == nullptr) {
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *dspFrames = static_cast<uint32_t>(aidlFrames);
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(
+            getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // Number of audio frames since the stream has exited standby.
+    // See the table at the start of 'StreamOutHalInterface' for when it needs to reset.
+    int64_t mostRecentResetPoint;
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        mostRecentResetPoint = statePositions.framesAtStandby;
+    } else {
+        mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+    }
+    *dspFrames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
     return OK;
 }
 
-status_t StreamOutHalAidl::getNextWriteTimestamp(int64_t *timestamp __unused) {
-    // Obsolete, use getPresentationPosition.
-    return INVALID_OPERATION;
-}
-
 status_t StreamOutHalAidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
-    ALOGD("%p %s", this, __func__);
+    AUGMENT_LOG(D);
     TIME_CHECK();
     if (!mStream) return NO_INIT;
     if (!mContext.isAsynchronous()) {
-        ALOGE("%s: the callback is intended for asynchronous streams only", __func__);
+        AUGMENT_LOG(E, "the callback is intended for asynchronous streams only");
         return INVALID_OPERATION;
     }
     mClientCallback = callback;
@@ -707,6 +839,14 @@
 }
 
 status_t StreamOutHalAidl::drain(bool earlyNotify) {
+    if (!mStream) return NO_INIT;
+
+    if (const auto state = getState(); isInDrainedState(state)) {
+        AUGMENT_LOG(D, "stream already in %s state", toString(state).c_str());
+        if (mContext.isAsynchronous()) onDrainReady();
+        return OK;
+    }
+
     return StreamHalAidl::drain(earlyNotify);
 }
 
@@ -719,13 +859,26 @@
         return BAD_VALUE;
     }
     int64_t aidlFrames = 0, aidlTimestamp = 0;
-    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp));
-    *frames = aidlFrames;
+    StatePositions statePositions{};
+    RETURN_STATUS_IF_ERROR(getObservablePosition(&aidlFrames, &aidlTimestamp, &statePositions));
+    // See the table at the start of 'StreamOutHalInterface'.
+    if (!mContext.isAsynchronous() && audio_has_proportional_frames(mConfig.format)) {
+        *frames = aidlFrames;
+    } else {
+        const int64_t mostRecentResetPoint =
+                std::max(statePositions.framesAtStandby, statePositions.framesAtFlushOrDrain);
+        *frames = aidlFrames <= mostRecentResetPoint ? 0 : aidlFrames - mostRecentResetPoint;
+    }
     timestamp->tv_sec = aidlTimestamp / NANOS_PER_SECOND;
     timestamp->tv_nsec = aidlTimestamp - timestamp->tv_sec * NANOS_PER_SECOND;
     return OK;
 }
 
+status_t StreamOutHalAidl::presentationComplete() {
+    AUGMENT_LOG(D);
+    return OK;
+}
+
 status_t StreamOutHalAidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& sourceMetadata) {
     TIME_CHECK();
@@ -855,10 +1008,10 @@
     }
 }
 
-void StreamOutHalAidl::onError() {
+void StreamOutHalAidl::onError(bool isHardError) {
     onAsyncError();
     if (auto clientCb = mClientCallback.load().promote(); clientCb != nullptr) {
-        clientCb->onError();
+        clientCb->onError(isHardError);
     }
 }
 
@@ -913,10 +1066,10 @@
         updateMetadata = true;
     }
     if (updateMetadata) {
-        ALOGD("%s set offload metadata %s", __func__, mOffloadMetadata.toString().c_str());
+        AUGMENT_LOG(D, "set offload metadata %s", mOffloadMetadata.toString().c_str());
         if (status_t status = statusTFromBinderStatus(
                         mStream->updateOffloadMetadata(mOffloadMetadata)); status != OK) {
-            ALOGE("%s: updateOffloadMetadata failed %d", __func__, status);
+            AUGMENT_LOG(E, "updateOffloadMetadata failed %d", status);
             return status;
         }
     }
@@ -1003,7 +1156,7 @@
             // Note: info.portId is not filled because it's a bit of framework info.
             result.push_back(std::move(info));
         } else {
-            ALOGE("%s: no static info for active microphone with id '%s'", __func__, d.id.c_str());
+            AUGMENT_LOG(E, "no static info for active microphone with id '%s'", d.id.c_str());
         }
     }
     *microphones = std::move(result);
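The StatePositions bookkeeping introduced above drives the positions reported by StreamOutHalAidl: frames are counted relative to the most recent reset point (standby, and additionally flush/drain for offload and direct streams). Below is a minimal standalone sketch of that arithmetic; the struct and function names are illustrative stand-ins, not the actual class members.

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    // Illustrative stand-in for the StatePositions cache added to StreamHalAidl.
    struct StatePositionsSketch {
        int64_t framesAtFlushOrDrain = 0;
        int64_t framesAtStandby = 0;
    };

    // Mirrors the offset applied in getRenderPosition(): report frames relative to the
    // most recent reset point, never going negative.
    int64_t framesSinceReset(int64_t observableFrames, const StatePositionsSketch& p,
                             bool isAsynchronous, bool proportionalFrames) {
        // Synchronous PCM streams reset only on standby; offload/direct streams also
        // reset on flush and drain (see the table in StreamHalInterface.h below).
        const int64_t resetPoint = (!isAsynchronous && proportionalFrames)
                ? p.framesAtStandby
                : std::max(p.framesAtStandby, p.framesAtFlushOrDrain);
        return observableFrames <= resetPoint ? 0 : observableFrames - resetPoint;
    }

    int main() {
        StatePositionsSketch p{.framesAtFlushOrDrain = 4800, .framesAtStandby = 0};
        // An offloaded stream that flushed at frame 4800 and has now observed 6000 frames
        // reports 1200 frames since the reset point.
        std::printf("%lld\n", (long long)framesSinceReset(6000, p, /*isAsynchronous=*/true,
                                                          /*proportionalFrames=*/false));
        return 0;
    }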
diff --git a/media/libaudiohal/impl/StreamHalAidl.h b/media/libaudiohal/impl/StreamHalAidl.h
index b20eb00..a1cdac4 100644
--- a/media/libaudiohal/impl/StreamHalAidl.h
+++ b/media/libaudiohal/impl/StreamHalAidl.h
@@ -53,7 +53,7 @@
 
     StreamContextAidl(
             ::aidl::android::hardware::audio::core::StreamDescriptor& descriptor,
-            bool isAsynchronous)
+            bool isAsynchronous, int ioHandle)
         : mFrameSizeBytes(descriptor.frameSizeBytes),
           mCommandMQ(new CommandMQ(descriptor.command)),
           mReplyMQ(new ReplyMQ(descriptor.reply)),
@@ -61,7 +61,8 @@
           mDataMQ(maybeCreateDataMQ(descriptor)),
           mIsAsynchronous(isAsynchronous),
           mIsMmapped(isMmapped(descriptor)),
-          mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)) {}
+          mMmapBufferDescriptor(maybeGetMmapBuffer(descriptor)),
+          mIoHandle(ioHandle) {}
     StreamContextAidl(StreamContextAidl&& other) :
             mFrameSizeBytes(other.mFrameSizeBytes),
             mCommandMQ(std::move(other.mCommandMQ)),
@@ -70,7 +71,8 @@
             mDataMQ(std::move(other.mDataMQ)),
             mIsAsynchronous(other.mIsAsynchronous),
             mIsMmapped(other.mIsMmapped),
-            mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)) {}
+            mMmapBufferDescriptor(std::move(other.mMmapBufferDescriptor)),
+            mIoHandle(other.mIoHandle) {}
     StreamContextAidl& operator=(StreamContextAidl&& other) {
         mFrameSizeBytes = other.mFrameSizeBytes;
         mCommandMQ = std::move(other.mCommandMQ);
@@ -80,6 +82,7 @@
         mIsAsynchronous = other.mIsAsynchronous;
         mIsMmapped = other.mIsMmapped;
         mMmapBufferDescriptor = std::move(other.mMmapBufferDescriptor);
+        mIoHandle = other.mIoHandle;
         return *this;
     }
     bool isValid() const {
@@ -95,7 +98,8 @@
     size_t getBufferSizeBytes() const { return mFrameSizeBytes * mBufferSizeFrames; }
     size_t getBufferSizeFrames() const { return mBufferSizeFrames; }
     size_t getBufferDurationMs(int32_t sampleRate) const {
-        return sampleRate != 0 ? mBufferSizeFrames * MILLIS_PER_SECOND / sampleRate : 0;
+        auto bufferSize = mIsMmapped ? getMmapBurstSize() : mBufferSizeFrames;
+        return sampleRate != 0 ? bufferSize * MILLIS_PER_SECOND / sampleRate : 0;
     }
     CommandMQ* getCommandMQ() const { return mCommandMQ.get(); }
     DataMQ* getDataMQ() const { return mDataMQ.get(); }
@@ -104,6 +108,8 @@
     bool isAsynchronous() const { return mIsAsynchronous; }
     bool isMmapped() const { return mIsMmapped; }
     const MmapBufferDescriptor& getMmapBufferDescriptor() const { return mMmapBufferDescriptor; }
+    size_t getMmapBurstSize() const { return mMmapBufferDescriptor.burstSizeFrames; }
+    int getIoHandle() const { return mIoHandle; }
 
   private:
     static std::unique_ptr<DataMQ> maybeCreateDataMQ(
@@ -136,6 +142,7 @@
     bool mIsAsynchronous;
     bool mIsMmapped;
     MmapBufferDescriptor mMmapBufferDescriptor;
+    int mIoHandle;
 };
 
 class StreamHalAidl : public virtual StreamHalInterface, public ConversionHelperAidl {
@@ -194,6 +201,11 @@
     // For tests.
     friend class sp<StreamHalAidl>;
 
+    struct StatePositions {
+        int64_t framesAtFlushOrDrain;
+        int64_t framesAtStandby;
+    };
+
     template<class T>
     static std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> getStreamCommon(
             const std::shared_ptr<T>& stream);
@@ -209,10 +221,51 @@
 
     ~StreamHalAidl() override;
 
+    ::aidl::android::hardware::audio::core::StreamDescriptor::State getState() {
+        std::lock_guard l(mLock);
+        return mLastReply.state;
+    }
+
+    bool isInDrainedState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::IDLE ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::STANDBY) {
+            // drain equivalent states
+            return true;
+        }
+        return false;
+    }
+
+    bool isInPlayOrRecordState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::ACTIVE ||
+            state ==
+                    ::aidl::android::hardware::audio::core::StreamDescriptor::State::TRANSFERRING ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::DRAINING) {
+            // play or record equivalent states
+            return true;
+        }
+        return false;
+    }
+
+    bool isInPausedState(
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::State& state) {
+        if (state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::PAUSED ||
+            state ==
+                    ::aidl::android::hardware::audio::core::StreamDescriptor::State::DRAIN_PAUSED ||
+            state == ::aidl::android::hardware::audio::core::StreamDescriptor::State::
+                             TRANSFER_PAUSED) {
+            // pause equivalent states
+            return true;
+        }
+        return false;
+    }
+
     status_t getLatency(uint32_t *latency);
 
     // Always returns non-negative values.
-    status_t getObservablePosition(int64_t *frames, int64_t *timestamp);
+    status_t getObservablePosition(int64_t* frames, int64_t* timestamp,
+            StatePositions* statePositions = nullptr);
 
     // Always returns non-negative values.
     status_t getHardwarePosition(int64_t *frames, int64_t *timestamp);
@@ -261,18 +314,16 @@
         result.format = config.format;
         return result;
     }
-    ::aidl::android::hardware::audio::core::StreamDescriptor::State getState() {
-        std::lock_guard l(mLock);
-        return mLastReply.state;
-    }
     // Note: Since `sendCommand` takes mLock while holding mCommandReplyLock, never call
     // it with `mLock` being held.
     status_t sendCommand(
-            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command &command,
+            const ::aidl::android::hardware::audio::core::StreamDescriptor::Command& command,
             ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
-            bool safeFromNonWorkerThread = false);
+            bool safeFromNonWorkerThread = false,
+            StatePositions* statePositions = nullptr);
     status_t updateCountersIfNeeded(
-            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr);
+            ::aidl::android::hardware::audio::core::StreamDescriptor::Reply* reply = nullptr,
+            StatePositions* statePositions = nullptr);
 
     const std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon> mStream;
     const std::shared_ptr<::aidl::android::media::audio::IHalAdapterVendorExtension> mVendorExt;
@@ -280,6 +331,9 @@
     std::mutex mLock;
     ::aidl::android::hardware::audio::core::StreamDescriptor::Reply mLastReply GUARDED_BY(mLock);
     int64_t mLastReplyExpirationNs GUARDED_BY(mLock) = 0;
+    // Cached values of observable positions when the stream last entered certain state.
+    // Updated for output streams only.
+    StatePositions mStatePositions GUARDED_BY(mLock) = {};
     // mStreamPowerLog is used for audio signal power logging.
     StreamPowerLog mStreamPowerLog;
     std::atomic<pid_t> mWorkerTid = -1;
@@ -308,10 +362,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    status_t getRenderPosition(uint32_t *dspFrames) override;
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    status_t getNextWriteTimestamp(int64_t *timestamp) override;
+    status_t getRenderPosition(uint64_t *dspFrames) override;
 
     // Set the callback for notifying completion of non-blocking write and drain.
     status_t setCallback(wp<StreamOutHalInterfaceCallback> callback) override;
@@ -331,12 +382,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     status_t drain(bool earlyNotify) override;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. The stream must
+    // already be paused before calling 'flush'.
     status_t flush() override;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) override;
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    status_t presentationComplete() override;
+
     // Called when the metadata of the stream's source has been changed.
     status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
 
@@ -370,7 +428,7 @@
     // StreamOutHalInterfaceCallback
     void onWriteReady() override;
     void onDrainReady() override;
-    void onError() override;
+    void onError(bool isHardError) override;
 
   private:
     friend class sp<StreamOutHalAidl>;
@@ -413,6 +471,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     status_t getCapturePosition(int64_t *frames, int64_t *time) override;
 
     // Get active microphones
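The fmqErrorHandler helper added to StreamHalAidl.cpp above builds a per-queue callback that signals the audio HAL watchdog and turns any FMQ error into a fatal log tagged with the queue name. A simplified, self-contained sketch of the same shape follows; the error enum, the abort-on-error behavior, and the function name are stand-ins for the real FMQ and logging types.

    #include <cstdio>
    #include <cstdlib>
    #include <string>

    enum class MQError { NONE, UNEXPECTED_CONFIGURATION };  // stand-in for the FMQ error type

    // Returns a callable that a queue can invoke on error; aborting mirrors LOG_ALWAYS_FATAL_IF.
    auto makeFmqErrorHandler(const char* mqName) {
        return [m = std::string(mqName)](MQError error, std::string&& message) {
            if (error != MQError::NONE) {
                std::fprintf(stderr, "%s: %s\n", m.c_str(), message.c_str());
                std::abort();
            }
        };
    }

    int main() {
        auto handler = makeFmqErrorHandler("CommandMQ");
        handler(MQError::NONE, "no error");  // benign call: nothing happens
        return 0;
    }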
diff --git a/media/libaudiohal/impl/StreamHalHidl.cpp b/media/libaudiohal/impl/StreamHalHidl.cpp
index 77c75db..a931fdd 100644
--- a/media/libaudiohal/impl/StreamHalHidl.cpp
+++ b/media/libaudiohal/impl/StreamHalHidl.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "StreamHalHidl"
 //#define LOG_NDEBUG 0
 
+#include <cinttypes>
+
 #include <android/hidl/manager/1.0/IServiceManager.h>
 #include <hwbinder/IPCThreadState.h>
 #include <media/AudioParameter.h>
@@ -589,32 +591,39 @@
     return OK;
 }
 
-status_t StreamOutHalHidl::getRenderPosition(uint32_t *dspFrames) {
+status_t StreamOutHalHidl::getRenderPosition(uint64_t *dspFrames) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
     Result retval;
+    uint32_t halPosition = 0;
     Return<void> ret = mStream->getRenderPosition(
             [&](Result r, uint32_t d) {
                 retval = r;
                 if (retval == Result::OK) {
-                    *dspFrames = d;
+                    halPosition = d;
                 }
             });
-    return processReturn("getRenderPosition", ret, retval);
-}
+    status_t status = processReturn("getRenderPosition", ret, retval);
+    if (status != OK) {
+        return status;
+    }
+    // Maintain a 64-bit render position using the 32-bit result from the HAL.
+    // This delta calculation relies on the arithmetic overflow behavior
+    // of integers. For example (100 - 0xFFFFFFF0) = 116.
+    std::lock_guard l(mPositionMutex);
+    const auto truncatedPosition = (uint32_t)mRenderPosition;
+    int32_t deltaHalPosition; // initialization not needed, overwritten by __builtin_sub_overflow()
+    (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
 
-status_t StreamOutHalHidl::getNextWriteTimestamp(int64_t *timestamp) {
-    TIME_CHECK();
-    if (mStream == 0) return NO_INIT;
-    Result retval;
-    Return<void> ret = mStream->getNextWriteTimestamp(
-            [&](Result r, int64_t t) {
-                retval = r;
-                if (retval == Result::OK) {
-                    *timestamp = t;
-                }
-            });
-    return processReturn("getRenderPosition", ret, retval);
+    if (deltaHalPosition >= 0) {
+        mRenderPosition += deltaHalPosition;
+    } else if (mExpectRetrograde) {
+        mExpectRetrograde = false;
+        mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
+        ALOGW("Retrograde motion of %" PRId32 " frames", -deltaHalPosition);
+    }
+    *dspFrames = mRenderPosition;
+    return OK;
 }
 
 status_t StreamOutHalHidl::setCallback(wp<StreamOutHalInterfaceCallback> callback) {
@@ -667,9 +676,23 @@
 status_t StreamOutHalHidl::flush() {
     TIME_CHECK();
     if (mStream == 0) return NO_INIT;
+    {
+        std::lock_guard l(mPositionMutex);
+        mRenderPosition = 0;
+        mExpectRetrograde = false;
+    }
     return processReturn("pause", mStream->flush());
 }
 
+status_t StreamOutHalHidl::standby() {
+    {
+        std::lock_guard l(mPositionMutex);
+        mRenderPosition = 0;
+        mExpectRetrograde = false;
+    }
+    return StreamHalHidl::standby();
+}
+
 status_t StreamOutHalHidl::getPresentationPosition(uint64_t *frames, struct timespec *timestamp) {
     // TIME_CHECK();  // TODO(b/243839867) reenable only when optimized.
     if (mStream == 0) return NO_INIT;
@@ -696,6 +719,16 @@
     }
 }
 
+status_t StreamOutHalHidl::presentationComplete() {
+    // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
+    // transitioning between tracks.
+    // The HAL resets the frame position without flush/stop being called, but calls back prior to
+    // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
+    // mRenderPosition.
+    mExpectRetrograde = true;
+    return OK;
+}
+
 #if MAJOR_VERSION == 2
 status_t StreamOutHalHidl::updateSourceMetadata(
         const StreamOutHalInterface::SourceMetadata& /* sourceMetadata */) {
@@ -964,7 +997,7 @@
     sp<StreamOutHalInterfaceCallback> callback = mCallback.load().promote();
     if (callback == 0) return;
     ALOGV("asyncCallback onError");
-    callback->onError();
+    callback->onError(false /*isHardError*/);
 }
 
 void StreamOutHalHidl::onCodecFormatChanged(const std::vector<uint8_t>& metadataBs) {
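The render-position handling added to StreamOutHalHidl::getRenderPosition above extends the HAL's 32-bit counter to 64 bits by accumulating wraparound-aware deltas. A minimal sketch of that calculation follows, written as a free function with illustrative names; the worked value reproduces the (100 - 0xFFFFFFF0) = 116 example from the comment above.

    #include <cstdint>
    #include <cstdio>

    // Extends a 32-bit, possibly wrapping HAL frame counter into a monotonic 64-bit counter.
    // 'expectRetrograde' permits one backwards step, e.g. after a gapless track transition.
    uint64_t extendPosition(uint64_t current64, uint32_t hal32, bool& expectRetrograde) {
        const uint32_t truncated = static_cast<uint32_t>(current64);
        int32_t delta;  // overwritten by __builtin_sub_overflow()
        (void)__builtin_sub_overflow(hal32, truncated, &delta);
        if (delta >= 0) {
            current64 += delta;
        } else if (expectRetrograde) {
            expectRetrograde = false;
            current64 -= static_cast<uint64_t>(-static_cast<int64_t>(delta));
        }
        return current64;
    }

    int main() {
        bool expectRetrograde = false;
        uint64_t pos = 0xFFFFFFF0u;  // close to the 32-bit wrap point
        pos = extendPosition(pos, 100u, expectRetrograde);
        std::printf("%llu\n", (unsigned long long)pos);  // wrap handled as a +116 step
        return 0;
    }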
diff --git a/media/libaudiohal/impl/StreamHalHidl.h b/media/libaudiohal/impl/StreamHalHidl.h
index 48da633..433e0a3 100644
--- a/media/libaudiohal/impl/StreamHalHidl.h
+++ b/media/libaudiohal/impl/StreamHalHidl.h
@@ -18,10 +18,12 @@
 #define ANDROID_HARDWARE_STREAM_HAL_HIDL_H
 
 #include <atomic>
+#include <mutex>
 
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStream.h)
 #include PATH(android/hardware/audio/CORE_TYPES_FILE_VERSION/IStreamIn.h)
 #include PATH(android/hardware/audio/FILE_VERSION/IStreamOut.h)
+#include <android-base/thread_annotations.h>
 #include <fmq/EventFlag.h>
 #include <fmq/MessageQueue.h>
 #include <media/audiohal/EffectHalInterface.h>
@@ -119,6 +121,9 @@
 
 class StreamOutHalHidl : public StreamOutHalInterface, public StreamHalHidl {
   public:
+    // Put the audio hardware input/output into standby mode (from StreamHalInterface).
+    status_t standby() override;
+
     // Return the frame size (number of bytes per sample) of a stream.
     virtual status_t getFrameSize(size_t *size);
 
@@ -136,10 +141,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    virtual status_t getRenderPosition(uint32_t *dspFrames);
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    virtual status_t getNextWriteTimestamp(int64_t *timestamp);
+    virtual status_t getRenderPosition(uint64_t *dspFrames);
 
     // Set the callback for notifying completion of non-blocking write and drain.
     virtual status_t setCallback(wp<StreamOutHalInterfaceCallback> callback);
@@ -159,12 +161,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify);
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. The stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush();
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    status_t presentationComplete() override;
+
     // Called when the metadata of the stream's source has been changed.
     status_t updateSourceMetadata(const SourceMetadata& sourceMetadata) override;
 
@@ -221,6 +230,10 @@
     std::unique_ptr<StatusMQ> mStatusMQ;
     std::atomic<pid_t> mWriterClient;
     EventFlag* mEfGroup;
+    std::mutex mPositionMutex;
+    // Used to expand correctly the 32-bit position from the HAL.
+    uint64_t mRenderPosition GUARDED_BY(mPositionMutex) = 0;
+    bool mExpectRetrograde GUARDED_BY(mPositionMutex) = false; // See 'presentationComplete'.
 
     // Can not be constructed directly by clients.
     StreamOutHalHidl(const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& stream);
@@ -250,6 +263,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time);
 
     // Get active microphones
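The new mPositionMutex, mRenderPosition, and mExpectRetrograde members rely on the Clang thread-safety annotations pulled in via android-base/thread_annotations.h. A small sketch of the pattern, assuming that header is on the include path; the class name is illustrative and only mirrors the flush()/standby() reset shown in StreamHalHidl.cpp above.

    #include <cstdint>
    #include <mutex>
    #include <android-base/thread_annotations.h>

    // Illustrative: Clang's -Wthread-safety flags any access to the annotated members
    // made without holding the named mutex.
    class PositionTracker {
      public:
        // Mirrors the reset performed by flush() and standby().
        void reset() {
            std::lock_guard l(mPositionMutex);
            mRenderPosition = 0;
            mExpectRetrograde = false;
        }
        uint64_t position() {
            std::lock_guard l(mPositionMutex);
            return mRenderPosition;
        }
      private:
        std::mutex mPositionMutex;
        uint64_t mRenderPosition GUARDED_BY(mPositionMutex) = 0;
        bool mExpectRetrograde GUARDED_BY(mPositionMutex) = false;
    };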
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
index ca6ff88..7879200 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
@@ -17,6 +17,7 @@
 #include <cstdint>
 #include <cstring>
 #include <optional>
+#include <unordered_set>
 #define LOG_TAG "AidlConversionEQ"
 //#define LOG_NDEBUG 0
 
@@ -262,10 +263,21 @@
         }
         case EQ_PARAM_GET_NUM_OF_PRESETS: {
             Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::presets));
-            const auto& presets = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+            auto presets = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
                     aidlParam, Equalizer, equalizer, Equalizer::presets,
                     std::vector<Equalizer::Preset>));
-            uint16_t num = presets.size();
+            // Preset indexes are assumed to be in the range [0, NUM_OF_PRESETS - 1], so
+            // filter out presets outside this range (one example is the preset {-1, "custom"}).
+            std::erase_if(presets, [](const auto& preset) { return preset.index < 0; });
+            // validate that the remaining indexes are unique and within [0, num - 1]
+            std::unordered_set<uint16_t> uniqueIndices;
+            const uint16_t num = presets.size();
+            for (const auto& preset : presets) {
+                if (preset.index >= num || 0 != uniqueIndices.count(preset.index)) {
+                    return BAD_VALUE;
+                }
+                uniqueIndices.insert(preset.index);
+            }
             return param.writeToValue(&num);
         }
         case EQ_PARAM_GET_PRESET_NAME: {
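The EQ_PARAM_GET_NUM_OF_PRESETS validation above can be read as a small standalone check: drop negative preset indexes, then require the remaining ones to be unique and within [0, num - 1]. A sketch under those assumptions, using a stand-in struct in place of Equalizer::Preset:

    #include <cstdint>
    #include <string>
    #include <unordered_set>
    #include <vector>

    struct Preset { int32_t index; std::string name; };  // stand-in for Equalizer::Preset

    // Returns the number of valid presets, or -1 if the remaining indexes are not a
    // permutation of [0, num - 1] (duplicates or out-of-range values).
    int validatePresetCount(std::vector<Preset> presets) {
        // Drop entries with negative indexes, e.g. the {-1, "custom"} preset.
        std::erase_if(presets, [](const Preset& p) { return p.index < 0; });
        std::unordered_set<uint16_t> uniqueIndices;
        const uint16_t num = presets.size();
        for (const Preset& p : presets) {
            if (p.index >= num || uniqueIndices.count(p.index) != 0) {
                return -1;
            }
            uniqueIndices.insert(static_cast<uint16_t>(p.index));
        }
        return num;
    }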
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
index bdee7b6..e87993a 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
@@ -52,12 +52,17 @@
     switch (type) {
         case HG_PARAM_HAPTIC_INTENSITY: {
             int32_t id = 0, scale;
-            if (OK != param.readFromValue(&id) || OK != param.readFromValue(&scale)) {
+            float scaleFactor, adaptiveScaleFactor;
+            if (OK != param.readFromValue(&id) || OK != param.readFromValue(&scale) ||
+                OK != param.readFromValue(&scaleFactor) ||
+                OK != param.readFromValue(&adaptiveScaleFactor)) {
                 ALOGE("%s invalid intensity %s", __func__, param.toString().c_str());
                 return BAD_VALUE;
             }
-            HapticGenerator::HapticScale hpScale(
-                    {.id = id, .scale = (HapticGenerator::VibratorScale)(scale)});
+            HapticGenerator::HapticScale hpScale({.id = id,
+                                                  .scale = (HapticGenerator::VibratorScale)(scale),
+                                                  .scaleFactor = scaleFactor,
+                                                  .adaptiveScaleFactor = adaptiveScaleFactor});
             aidlParam = MAKE_SPECIFIC_PARAMETER(HapticGenerator, hapticGenerator, hapticScales,
                                                 {hpScale});
             break;
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 7f6c1fb..3f16526 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -90,7 +90,8 @@
             audio_output_flags_t flags,
             struct audio_config *config,
             const char *address,
-            sp<StreamOutHalInterface> *outStream) = 0;
+            sp<StreamOutHalInterface> *outStream,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata = {}) = 0;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
index 37615af..4bd7e3d 100644
--- a/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/StreamHalInterface.h
@@ -107,7 +107,7 @@
   public:
     virtual void onWriteReady() {}
     virtual void onDrainReady() {}
-    virtual void onError() {}
+    virtual void onError(bool /*isHardError*/) {}
 
   protected:
     StreamOutHalInterfaceCallback() = default;
@@ -135,6 +135,38 @@
     virtual ~StreamOutHalInterfaceLatencyModeCallback() = default;
 };
 
+/**
+ * On position reporting. There are two methods: 'getRenderPosition' and
+ * 'getPresentationPosition'. The first difference is that they may have a
+ * time offset because "render" position relates to what happens between
+ * ADSP and DAC, while "observable" position is relative to the external
+ * observer. The second difference is that 'getRenderPosition' always
+ * resets on standby (for all types of stream data) according to its
+ * definition. Since the original C definition of 'getRenderPosition' used
+ * 32-bit frame counters, and also because in complex playback chains that
+ * include wireless devices the "observable" position has more practical
+ * meaning, 'getRenderPosition' does not exist in the AIDL HAL interface.
+ * The table below summarizes frame count behavior for 'getPresentationPosition':
+ *
+ *               | Mixed      | Direct       | Direct
+ *               |            | non-offload  | offload
+ * ==============|============|==============|==============
+ *  PCM and      | Continuous |              |
+ *  encapsulated |            |              |
+ *  bitstream    |            |              |
+ * --------------|------------| Continuous†  |
+ *  Bitstream    |            |              | Reset on
+ *  encapsulated |            |              | flush, drain
+ *  into PCM     |            |              | and standby
+ *               | Not        |              |
+ * --------------| supported  |--------------|
+ *  Bitstream    |            | Reset on     |
+ *               |            | flush, drain |
+ *               |            | and standby  |
+ *               |            |              |
+ *
+ * † - on standby, reset of the frame count happens at the framework level.
+ */
 class StreamOutHalInterface : public virtual StreamHalInterface {
   public:
     // Return the audio hardware driver estimated latency in milliseconds.
@@ -151,10 +183,7 @@
 
     // Return the number of audio frames written by the audio dsp to DAC since
     // the output has exited standby.
-    virtual status_t getRenderPosition(uint32_t *dspFrames) = 0;
-
-    // Get the local time at which the next write to the audio driver will be presented.
-    virtual status_t getNextWriteTimestamp(int64_t *timestamp) = 0;
+    virtual status_t getRenderPosition(uint64_t *dspFrames) = 0;
 
     // Set the callback for notifying completion of non-blocking write and drain.
     // The callback must be owned by someone else. The output stream does not own it
@@ -176,12 +205,19 @@
     // Requests notification when data buffered by the driver/hardware has been played.
     virtual status_t drain(bool earlyNotify) = 0;
 
-    // Notifies to the audio driver to flush the queued data.
+    // Notifies the audio driver to flush (that is, drop) the queued data. The stream must
+    // already be paused before calling 'flush'.
     virtual status_t flush() = 0;
 
     // Return a recent count of the number of audio frames presented to an external observer.
+    // This excludes frames which have been written but are still in the pipeline. See the
+    // table at the start of the 'StreamOutHalInterface' for the specification of the frame
+    // count behavior w.r.t. 'flush', 'drain' and 'standby' operations.
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp) = 0;
 
+    // Notifies the HAL layer that the framework considers the current playback as completed.
+    virtual status_t presentationComplete() = 0;
+
     struct SourceMetadata {
         std::vector<playback_track_metadata_v7_t> tracks;
     };
@@ -270,6 +306,7 @@
 
     // Return a recent count of the number of audio frames received and
     // the clock time associated with that frame count.
+    // The count must not reset to zero when a PCM input enters standby.
     virtual status_t getCapturePosition(int64_t *frames, int64_t *time) = 0;
 
     // Get active microphones
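The StreamOutHalInterfaceCallback::onError signature above now carries an isHardError flag (HIDL streams always pass false, as seen in StreamHalHidl.cpp). A hedged sketch of how a callback implementer might branch on it; the stand-in base class and the recovery actions are illustrative, not framework behavior.

    #include <cstdio>

    // Stand-in for StreamOutHalInterfaceCallback; only the error path is sketched.
    struct CallbackSketch {
        virtual ~CallbackSketch() = default;
        virtual void onError(bool /*isHardError*/) {}
    };

    struct TrackCallback : CallbackSketch {
        void onError(bool isHardError) override {
            if (isHardError) {
                // e.g. the stream is unrecoverable: tear it down and reopen.
                std::printf("hard error: invalidating track\n");
            } else {
                // e.g. a transient async error: the stream state machine moved to ERROR,
                // but the client may retry after flushing.
                std::printf("soft error: retrying after flush\n");
            }
        }
    };

    int main() {
        TrackCallback cb;
        cb.onError(false);  // the soft-error path used by the HIDL adapter
        cb.onError(true);
        return 0;
    }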
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 5106874..0f5334f 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -18,6 +18,7 @@
 #include <memory>
 #include <mutex>
 #include <string>
+#include <thread>
 #include <vector>
 
 #define LOG_TAG "CoreAudioHalAidlTest"
@@ -28,7 +29,9 @@
 #include <StreamHalAidl.h>
 #include <aidl/android/hardware/audio/core/BnModule.h>
 #include <aidl/android/hardware/audio/core/BnStreamCommon.h>
+#include <aidl/android/hardware/audio/core/BnStreamOut.h>
 #include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
+#include <aidl/android/media/audio/common/AudioGainMode.h>
 #include <aidl/android/media/audio/common/Int.h>
 #include <utils/Log.h>
 
@@ -39,10 +42,13 @@
 using ::aidl::android::hardware::audio::core::VendorParameter;
 using ::aidl::android::media::audio::common::AudioChannelLayout;
 using ::aidl::android::media::audio::common::AudioConfig;
+using ::aidl::android::media::audio::common::AudioDevice;
 using ::aidl::android::media::audio::common::AudioDeviceDescription;
 using ::aidl::android::media::audio::common::AudioDeviceType;
 using ::aidl::android::media::audio::common::AudioFormatDescription;
 using ::aidl::android::media::audio::common::AudioFormatType;
+using ::aidl::android::media::audio::common::AudioGainConfig;
+using ::aidl::android::media::audio::common::AudioGainMode;
 using ::aidl::android::media::audio::common::AudioIoFlags;
 using ::aidl::android::media::audio::common::AudioPort;
 using ::aidl::android::media::audio::common::AudioPortConfig;
@@ -60,13 +66,13 @@
     const std::vector<VendorParameter>& getSyncParameters() const { return mSyncParameters; }
 
   protected:
-    ndk::ScopedAStatus getVendorParametersImpl(const std::vector<std::string>& in_parameterIds) {
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds) {
         mGetParameterIds.insert(mGetParameterIds.end(), in_parameterIds.begin(),
                                 in_parameterIds.end());
         return ndk::ScopedAStatus::ok();
     }
-    ndk::ScopedAStatus setVendorParametersImpl(const std::vector<VendorParameter>& in_parameters,
-                                               bool async) {
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+                                           bool async) {
         if (async) {
             mAsyncParameters.insert(mAsyncParameters.end(), in_parameters.begin(),
                                     in_parameters.end());
@@ -160,6 +166,34 @@
             createProfile(PcmType::INT_16_BIT, {AudioChannelLayout::LAYOUT_STEREO}, {48000})};
     Configuration c;
 
+    AudioPort micInDevice =
+            createPort(c.nextPortId++, "Built-In Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE,
+                                           1 << AudioPortDeviceExt::FLAG_INDEX_DEFAULT_DEVICE));
+    micInDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInDevice);
+
+    AudioPort micInBackDevice =
+            createPort(c.nextPortId++, "Built-In Back Mic", 0, true,
+                       createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0));
+    micInBackDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(micInBackDevice);
+
+    AudioPort primaryInMix =
+            createPort(c.nextPortId++, "primary input", 0, true, createPortMixExt(0, 1));
+    primaryInMix.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(primaryInMix);
+
+    AudioPort speakerOutDevice = createPort(c.nextPortId++, "Speaker", 0, false,
+                                            createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0));
+    speakerOutDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(speakerOutDevice);
+
+    AudioPort primaryOutMix =
+            createPort(c.nextPortId++, "primary output", 0, false, createPortMixExt(1, 1));
+    primaryOutMix.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(primaryOutMix);
+
     AudioPort btOutDevice =
             createPort(c.nextPortId++, "BT A2DP Out", 0, false,
                        createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
@@ -172,11 +206,142 @@
     btOutMix.profiles = standardPcmAudioProfiles;
     c.ports.push_back(btOutMix);
 
+    c.routes.push_back(createRoute({micInDevice, micInBackDevice}, primaryInMix));
+    c.routes.push_back(createRoute({primaryOutMix}, speakerOutDevice));
     c.routes.push_back(createRoute({btOutMix}, btOutDevice));
 
     return c;
 }
 
+class StreamCommonMock : public ::aidl::android::hardware::audio::core::BnStreamCommon,
+                         public VendorParameterMock {
+    ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
+    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
+                                           std::vector<VendorParameter>*) override {
+        return VendorParameterMock::getVendorParameters(in_parameterIds);
+    }
+    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
+                                           bool async) override {
+        return VendorParameterMock::setVendorParameters(in_parameters, async);
+    }
+    ndk::ScopedAStatus addEffect(
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus removeEffect(
+            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+};
+
+class StreamContext {
+  public:
+    using Descriptor = ::aidl::android::hardware::audio::core::StreamDescriptor;
+    typedef ::android::AidlMessageQueue<
+            Descriptor::Command, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+            CommandMQ;
+    typedef ::android::AidlMessageQueue<
+            Descriptor::Reply, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+            ReplyMQ;
+    typedef ::android::AidlMessageQueue<
+            int8_t, ::aidl::android::hardware::common::fmq::SynchronizedReadWrite>
+            DataMQ;
+
+    StreamContext() = default;
+    StreamContext(std::unique_ptr<CommandMQ> commandMQ, std::unique_ptr<ReplyMQ> replyMQ,
+                  std::unique_ptr<DataMQ> dataMQ)
+        : mCommandMQ(std::move(commandMQ)),
+          mReplyMQ(std::move(replyMQ)),
+          mDataMQ(std::move(dataMQ)) {}
+    void fillDescriptor(Descriptor* desc) {
+        if (mCommandMQ) {
+            desc->command = mCommandMQ->dupeDesc();
+        }
+        if (mReplyMQ) {
+            desc->reply = mReplyMQ->dupeDesc();
+        }
+        if (mDataMQ) {
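+            // Test-only sizing: 2-byte frames and a 48-frame buffer, matching the 96-byte DataMQ below.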
+            desc->frameSizeBytes = 2;
+            desc->bufferSizeFrames = 48;
+            desc->audio.set<Descriptor::AudioBuffer::Tag::fmq>(mDataMQ->dupeDesc());
+        }
+    }
+
+  private:
+    std::unique_ptr<CommandMQ> mCommandMQ =
+            std::make_unique<CommandMQ>(1, true /*configureEventFlagWord*/);
+    std::unique_ptr<ReplyMQ> mReplyMQ =
+            std::make_unique<ReplyMQ>(1, true /*configureEventFlagWord*/);
+    std::unique_ptr<DataMQ> mDataMQ = std::make_unique<DataMQ>(96);
+};
+
+class StreamOutMock : public ::aidl::android::hardware::audio::core::BnStreamOut {
+  public:
+    explicit StreamOutMock(StreamContext&& ctx) : mContext(std::move(ctx)) {}
+
+  private:
+    ndk::ScopedAStatus getStreamCommon(
+            std::shared_ptr<::aidl::android::hardware::audio::core::IStreamCommon>* _aidl_return)
+            override {
+        if (!mCommon) {
+            mCommon = ndk::SharedRefBase::make<StreamCommonMock>();
+        }
+        *_aidl_return = mCommon;
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateMetadata(
+            const ::aidl::android::hardware::audio::common::SourceMetadata&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus updateOffloadMetadata(
+            const ::aidl::android::hardware::audio::common::AudioOffloadMetadata&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getHwVolume(std::vector<float>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setHwVolume(const std::vector<float>&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getAudioDescriptionMixLevel(float*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setAudioDescriptionMixLevel(float) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getDualMonoMode(
+            ::aidl::android::media::audio::common::AudioDualMonoMode*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setDualMonoMode(
+            ::aidl::android::media::audio::common::AudioDualMonoMode) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getRecommendedLatencyModes(
+            std::vector<::aidl::android::media::audio::common::AudioLatencyMode>*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setLatencyMode(
+            ::aidl::android::media::audio::common::AudioLatencyMode) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus getPlaybackRateParameters(
+            ::aidl::android::media::audio::common::AudioPlaybackRate*) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus setPlaybackRateParameters(
+            const ::aidl::android::media::audio::common::AudioPlaybackRate&) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    ndk::ScopedAStatus selectPresentation(int32_t, int32_t) override {
+        return ndk::ScopedAStatus::ok();
+    }
+    StreamContext mContext;
+    std::shared_ptr<StreamCommonMock> mCommon;
+};
+
 class ModuleMock : public ::aidl::android::hardware::audio::core::BnModule,
                    public VendorParameterMock {
   public:
@@ -184,6 +349,18 @@
     explicit ModuleMock(const Configuration& config) : mConfig(config) {}
     bool isScreenTurnedOn() const { return mIsScreenTurnedOn; }
     ScreenRotation getScreenRotation() const { return mScreenRotation; }
+    std::vector<AudioPatch> getPatches() {
+        std::vector<AudioPatch> result;
+        getAudioPatches(&result);
+        return result;
+    }
+    std::optional<AudioPortConfig> getPortConfig(int32_t id) {
+        auto iter = findById<AudioPortConfig>(mConfig.portConfigs, id);
+        if (iter != mConfig.portConfigs.end()) {
+            return *iter;
+        }
+        return std::nullopt;
+    }
 
   private:
     ndk::ScopedAStatus setModuleDebug(
@@ -299,7 +476,10 @@
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus openOutputStream(const OpenOutputStreamArguments&,
-                                        OpenOutputStreamReturn*) override {
+                                        OpenOutputStreamReturn* _aidl_return) override {
+        StreamContext context;
+        context.fillDescriptor(&_aidl_return->desc);
+        _aidl_return->stream = ndk::SharedRefBase::make<StreamOutMock>(std::move(context));
         return ndk::ScopedAStatus::ok();
     }
     ndk::ScopedAStatus getSupportedPlaybackRateFactors(SupportedPlaybackRateFactors*) override {
@@ -311,6 +491,7 @@
         if (requested.id == 0) {
             *patch = requested;
             patch->id = mConfig.nextPatchId++;
+            patch->latenciesMs.push_back(100);
             mConfig.patches.push_back(*patch);
             ALOGD("%s: returning %s", __func__, patch->toString().c_str());
         } else {
@@ -397,11 +578,11 @@
     ndk::ScopedAStatus generateHwAvSyncId(int32_t*) override { return ndk::ScopedAStatus::ok(); }
     ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
                                            std::vector<VendorParameter>*) override {
-        return getVendorParametersImpl(in_parameterIds);
+        return VendorParameterMock::getVendorParameters(in_parameterIds);
     }
     ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
                                            bool async) override {
-        return setVendorParametersImpl(in_parameters, async);
+        return VendorParameterMock::setVendorParameters(in_parameters, async);
     }
     ndk::ScopedAStatus addDeviceEffect(
             int32_t,
@@ -434,29 +615,6 @@
     ScreenRotation mScreenRotation = ScreenRotation::DEG_0;
 };
 
-class StreamCommonMock : public ::aidl::android::hardware::audio::core::BnStreamCommon,
-                         public VendorParameterMock {
-    ndk::ScopedAStatus close() override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus prepareToClose() override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus updateHwAvSyncId(int32_t) override { return ndk::ScopedAStatus::ok(); }
-    ndk::ScopedAStatus getVendorParameters(const std::vector<std::string>& in_parameterIds,
-                                           std::vector<VendorParameter>*) override {
-        return getVendorParametersImpl(in_parameterIds);
-    }
-    ndk::ScopedAStatus setVendorParameters(const std::vector<VendorParameter>& in_parameters,
-                                           bool async) override {
-        return setVendorParametersImpl(in_parameters, async);
-    }
-    ndk::ScopedAStatus addEffect(
-            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
-        return ndk::ScopedAStatus::ok();
-    }
-    ndk::ScopedAStatus removeEffect(
-            const std::shared_ptr<::aidl::android::hardware::audio::effect::IEffect>&) override {
-        return ndk::ScopedAStatus::ok();
-    }
-};
-
 VendorParameter makeVendorParameter(const std::string& id, int value) {
     VendorParameter result{.id = id};
     // Note: in real life, a parcelable type defined by vendor must be used,
@@ -621,6 +779,19 @@
 }
 }  // namespace aidl::android::hardware::audio::core
 
+namespace aidl::android::media::audio::common {
+template <typename P>
+std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>,
+                 std::ostream&>
+operator<<(std::ostream& os, const P& p) {
+    return os << p.toString();
+}
+template <typename E>
+std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) {
+    return os << toString(e);
+}
+}  // namespace aidl::android::media::audio::common
+
 using namespace android;
 
 namespace {
@@ -655,7 +826,7 @@
 class DeviceHalAidlTest : public testing::Test {
   public:
     void SetUp() override {
-        mModule = ndk::SharedRefBase::make<ModuleMock>();
+        mModule = ndk::SharedRefBase::make<ModuleMock>(getTestConfiguration());
         mDevice = sp<DeviceHalAidl>::make("test", mModule, nullptr /*vext*/);
     }
     void TearDown() override {
@@ -697,6 +868,46 @@
     EXPECT_EQ(ScreenRotation::DEG_0, mModule->getScreenRotation());
 }
 
+// See http://b/357487484#comment6
+TEST_F(DeviceHalAidlTest, StreamReleaseOnMapperCleanup) {
+    ASSERT_EQ(OK, mDevice->initCheck());
+    // Since the test is in effect probabilistic, try multiple times.
+    for (int i = 0; i < 100; ++i) {
+        sp<StreamOutHalInterface> stream1;
+        struct audio_config config = AUDIO_CONFIG_INITIALIZER;
+        config.sample_rate = 48000;
+        config.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+        config.format = AUDIO_FORMAT_PCM_16_BIT;
+        ASSERT_EQ(OK, mDevice->openOutputStream(42 /*handle*/, AUDIO_DEVICE_OUT_SPEAKER,
+                                                AUDIO_OUTPUT_FLAG_NONE, &config, "" /*address*/,
+                                                &stream1));
+        ASSERT_EQ(1, stream1->getStrongCount());
+        std::atomic<bool> stopReleaser = false;
+        // Try to catch the moment when Hal2AidlMapper promotes its wp<StreamHalInterface> to an
+        // sp<> in Hal2AidlMapper::resetUnusedPatchesAndPortConfigs, and release our reference at
+        // that point so that the temporary sp<> held by Hal2AidlMapper becomes the sole owner and
+        // the stream is destroyed while DeviceHalAidl::mLock is held.
+        std::thread releaser([&stream1, &stopReleaser]() {
+            while (!stopReleaser) {
+                if (stream1->getStrongCount() > 1) {
+                    stream1.clear();
+                    break;
+                }
+                std::this_thread::yield();
+            }
+        });
+        sp<StreamOutHalInterface> stream2;
+        // Opening another stream triggers a call to
+        // Hal2AidlMapper::resetUnusedPatchesAndPortConfigs.  It must not cause a deadlock of the
+        // test (main) thread.
+        ASSERT_EQ(OK, mDevice->openOutputStream(43 /*handle*/, AUDIO_DEVICE_OUT_SPEAKER,
+                                                AUDIO_OUTPUT_FLAG_NONE, &config, "" /*address*/,
+                                                &stream2));
+        stopReleaser = true;
+        releaser.join();
+    }
+}
+
 class DeviceHalAidlVendorParametersTest : public testing::Test {
   public:
     void SetUp() override {
@@ -777,9 +988,9 @@
         mVendorExt = ndk::SharedRefBase::make<TestHalAdapterVendorExtension>();
         struct audio_config config = AUDIO_CONFIG_INITIALIZER;
         ::aidl::android::hardware::audio::core::StreamDescriptor descriptor;
+        StreamContextAidl context(descriptor, false /*isAsynchronous*/, 0);
         mStream = sp<StreamHalAidl>::make("test", false /*isInput*/, config, 0 /*nominalLatency*/,
-                                          StreamContextAidl(descriptor, false /*isAsynchronous*/),
-                                          mStreamCommon, mVendorExt);
+                                          std::move(context), mStreamCommon, mVendorExt);
     }
     void TearDown() override {
         mStream.clear();
@@ -1141,3 +1352,103 @@
     EXPECT_EQ(0, mMapper->findFwkPatch(mPatch.id));
     EXPECT_EQ(0, mMapper->findFwkPatch(newPatchId));
 }
+
+TEST_F(Hal2AidlMapperTest, ChangeTransientPatchDevice) {
+    std::mutex mutex;  // Only needed for cleanups.
+    auto mapperAccessor = std::make_unique<LockedAccessor<Hal2AidlMapper>>(*mMapper, mutex);
+    Hal2AidlMapper::Cleanups cleanups(*mapperAccessor);
+    AudioConfig config;
+    config.base.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+            AudioChannelLayout::LAYOUT_STEREO);
+    config.base.format =
+            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+    config.base.sampleRate = 48000;
+    AudioDevice defaultDevice;
+    defaultDevice.type.type = AudioDeviceType::IN_DEFAULT;
+    AudioPortConfig mixPortConfig;
+    AudioPatch transientPatch;
+    ASSERT_EQ(OK, mMapper->prepareToOpenStream(43 /*ioHandle*/, defaultDevice,
+                                               AudioIoFlags::make<AudioIoFlags::input>(0),
+                                               AudioSource::DEFAULT, &cleanups, &config,
+                                               &mixPortConfig, &transientPatch));
+    cleanups.disarmAll();
+    ASSERT_NE(0, transientPatch.id);
+    ASSERT_NE(0, mixPortConfig.id);
+    sp<StreamHalInterface> stream = sp<StreamHalMock>::make();
+    mMapper->addStream(stream, mixPortConfig.id, transientPatch.id);
+
+    AudioPatch patch{};
+    int32_t patchId;
+    AudioPortConfig backMicPortConfig;
+    backMicPortConfig.channelMask = config.base.channelMask;
+    backMicPortConfig.format = config.base.format;
+    backMicPortConfig.sampleRate = aidl::android::media::audio::common::Int{config.base.sampleRate};
+    backMicPortConfig.flags = AudioIoFlags::make<AudioIoFlags::input>(0);
+    backMicPortConfig.ext = createPortDeviceExt(AudioDeviceType::IN_MICROPHONE_BACK, 0);
+    ASSERT_EQ(OK, mMapper->createOrUpdatePatch({backMicPortConfig}, {mixPortConfig}, &patchId,
+                                               &cleanups));
+    cleanups.disarmAll();
+    ASSERT_EQ(android::OK,
+              mMapper->findPortConfig(backMicPortConfig.ext.get<AudioPortExt::device>().device,
+                                      &backMicPortConfig));
+    EXPECT_NE(0, backMicPortConfig.id);
+
+    EXPECT_EQ(transientPatch.id, patchId);
+    auto patches = mModule->getPatches();
+    auto patchIt = findById(patches, patchId);
+    ASSERT_NE(patchIt, patches.end());
+    EXPECT_EQ(std::vector<int32_t>{backMicPortConfig.id}, patchIt->sourcePortConfigIds);
+    EXPECT_EQ(std::vector<int32_t>{mixPortConfig.id}, patchIt->sinkPortConfigIds);
+}
+
+TEST_F(Hal2AidlMapperTest, SetAudioPortConfigGainChangeExistingPortConfig) {
+    // First set config, then update gain.
+    AudioPortConfig speakerPortConfig;
+    speakerPortConfig.ext = createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0);
+    speakerPortConfig.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+            AudioChannelLayout::LAYOUT_STEREO);
+    speakerPortConfig.format =
+            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+    speakerPortConfig.sampleRate = ::aidl::android::media::audio::common::Int(48000);
+    AudioPortConfig resultingPortConfig;
+    ASSERT_EQ(OK,
+              mMapper->setPortConfig(speakerPortConfig, std::set<int32_t>(), &resultingPortConfig));
+    EXPECT_NE(0, resultingPortConfig.id);
+    EXPECT_NE(0, resultingPortConfig.portId);
+
+    AudioPortConfig gainUpdate;
+    gainUpdate.ext = createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0);
+    AudioGainConfig gainConfig{.index = -1,
+                               .mode = 1 << static_cast<int>(AudioGainMode::JOINT),
+                               .channelMask = AudioChannelLayout{},
+                               .values = std::vector<int32_t>{-3200},
+                               .rampDurationMs = 0};
+    gainUpdate.gain = gainConfig;
+    AudioPortConfig resultingGainUpdate;
+    ASSERT_EQ(OK, mMapper->setPortConfig(gainUpdate, std::set<int32_t>(), &resultingGainUpdate));
+    EXPECT_EQ(resultingPortConfig.id, resultingGainUpdate.id);
+    auto updatedPortConfig = mModule->getPortConfig(resultingGainUpdate.id);
+    ASSERT_TRUE(updatedPortConfig.has_value());
+    ASSERT_TRUE(updatedPortConfig->gain.has_value());
+    EXPECT_EQ(gainConfig, updatedPortConfig->gain);
+}
+
+TEST_F(Hal2AidlMapperTest, SetAudioPortConfigGainChangeFromScratch) {
+    // Set gain as the first operation, the HAL should suggest the rest of the configuration.
+    AudioPortConfig gainSet;
+    gainSet.ext = createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0);
+    AudioGainConfig gainConfig{.index = -1,
+                               .mode = 1 << static_cast<int>(AudioGainMode::JOINT),
+                               .channelMask = AudioChannelLayout{},
+                               .values = std::vector<int32_t>{-3200},
+                               .rampDurationMs = 0};
+    gainSet.gain = gainConfig;
+    AudioPortConfig resultingPortConfig;
+    ASSERT_EQ(OK, mMapper->setPortConfig(gainSet, std::set<int32_t>(), &resultingPortConfig));
+    EXPECT_NE(0, resultingPortConfig.id);
+    EXPECT_NE(0, resultingPortConfig.portId);
+    auto portConfig = mModule->getPortConfig(resultingPortConfig.id);
+    ASSERT_TRUE(portConfig.has_value());
+    ASSERT_TRUE(portConfig->gain.has_value());
+    EXPECT_EQ(gainConfig, portConfig->gain);
+}
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index f9ae2d4..7ef9ff2 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -585,7 +585,7 @@
     t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
     // haptic
     t->mHapticPlaybackEnabled = false;
-    t->mHapticScale = {/*level=*/os::HapticLevel::NONE };
+    t->mHapticScale = os::HapticScale::none();
     t->mHapticMaxAmplitude = NAN;
     t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
     t->mMixerHapticChannelCount = 0;
diff --git a/media/libaudioprocessing/AudioMixerOps.h b/media/libaudioprocessing/AudioMixerOps.h
index ab6a8b6..8f60d29 100644
--- a/media/libaudioprocessing/AudioMixerOps.h
+++ b/media/libaudioprocessing/AudioMixerOps.h
@@ -347,6 +347,7 @@
         [6] = AUDIO_CHANNEL_OUT_5POINT1,
         [7] = AUDIO_CHANNEL_OUT_6POINT1,
         [8] = AUDIO_CHANNEL_OUT_7POINT1,
+        [10] = AUDIO_CHANNEL_OUT_5POINT1POINT4,
         [12] = AUDIO_CHANNEL_OUT_7POINT1POINT4,
         [14] = AUDIO_CHANNEL_OUT_9POINT1POINT4,
         [16] = AUDIO_CHANNEL_OUT_9POINT1POINT6,
diff --git a/media/libaudioprocessing/tests/mixerops_tests.cpp b/media/libaudioprocessing/tests/mixerops_tests.cpp
index 2500ba9..235129f 100644
--- a/media/libaudioprocessing/tests/mixerops_tests.cpp
+++ b/media/libaudioprocessing/tests/mixerops_tests.cpp
@@ -154,6 +154,9 @@
 TEST(mixerops, stereovolume_8) {
     MixerOpsBasicTest<MIXTYPE_MULTI_STEREOVOL, 8>::testStereoVolume();
 }
+TEST(mixerops, stereovolume_10) {
+    MixerOpsBasicTest<MIXTYPE_MULTI_STEREOVOL, 10>::testStereoVolume();
+}
 TEST(mixerops, stereovolume_12) {
     if constexpr (FCC_LIMIT >= 12) { // NOTE: FCC_LIMIT is an enum, so can't #if
         MixerOpsBasicTest<MIXTYPE_MULTI_STEREOVOL, 12>::testStereoVolume();
diff --git a/media/libeffects/data/Android.bp b/media/libeffects/data/Android.bp
new file mode 100644
index 0000000..2acf229
--- /dev/null
+++ b/media/libeffects/data/Android.bp
@@ -0,0 +1,19 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+prebuilt_etc {
+    name: "framework-audio_effects.xml",
+    src: "audio_effects.xml",
+    filename: "audio_effects.xml",
+}
diff --git a/media/libeffects/downmix/aidl/DownmixContext.cpp b/media/libeffects/downmix/aidl/DownmixContext.cpp
index 593e16f..3a55361 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.cpp
+++ b/media/libeffects/downmix/aidl/DownmixContext.cpp
@@ -100,11 +100,6 @@
     return RetCode::SUCCESS;
 }
 
-void DownmixContext::reset() {
-    disable();
-    resetBuffer();
-}
-
 IEffect::Status DownmixContext::downmixProcess(float* in, float* out, int samples) {
     IEffect::Status status = {EX_ILLEGAL_ARGUMENT, 0, 0};
 
diff --git a/media/libeffects/downmix/aidl/DownmixContext.h b/media/libeffects/downmix/aidl/DownmixContext.h
index a381d7f..1be1508 100644
--- a/media/libeffects/downmix/aidl/DownmixContext.h
+++ b/media/libeffects/downmix/aidl/DownmixContext.h
@@ -32,9 +32,8 @@
   public:
     DownmixContext(int statusDepth, const Parameter::Common& common);
     ~DownmixContext();
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
 
     RetCode setDmType(Downmix::Type type) {
         mType = type;
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.cpp b/media/libeffects/downmix/aidl/EffectDownmix.cpp
index de60ca4..10c7c4f 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.cpp
+++ b/media/libeffects/downmix/aidl/EffectDownmix.cpp
@@ -71,26 +71,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus DownmixImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->reset();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus DownmixImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::downmix != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
@@ -177,7 +157,10 @@
      * in the life cycle of workerThread (threadLoop).
      */
     uint32_t efState = 0;
-    if (!mEventFlag || ::android::OK != mEventFlag->wait(kEventFlagNotEmpty, &efState)) {
+    if (!mEventFlag ||
+        ::android::OK != mEventFlag->wait(mDataMqNotEmptyEf, &efState, 0 /* no timeout */,
+                                          true /* retry */) ||
+        !(efState & mDataMqNotEmptyEf)) {
         LOG(ERROR) << getEffectName() << __func__ << ": StatusEventFlag invalid";
     }
 
diff --git a/media/libeffects/downmix/aidl/EffectDownmix.h b/media/libeffects/downmix/aidl/EffectDownmix.h
index b7d621a..cea6d1b 100644
--- a/media/libeffects/downmix/aidl/EffectDownmix.h
+++ b/media/libeffects/downmix/aidl/EffectDownmix.h
@@ -31,7 +31,6 @@
     DownmixImpl() = default;
     ~DownmixImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
index fdc16e3..8324473 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.cpp
@@ -213,11 +213,12 @@
     RETURN_OK_IF(mState != State::INIT);
     mImplContext = createContext(common);
     RETURN_IF(!mContext || !mImplContext, EX_NULL_POINTER, "createContextFailed");
-    int version = 0;
-    RETURN_IF(!getInterfaceVersion(&version).isOk(), EX_UNSUPPORTED_OPERATION,
+    RETURN_IF(!getInterfaceVersion(&mVersion).isOk(), EX_UNSUPPORTED_OPERATION,
               "FailedToGetInterfaceVersion");
-    mImplContext->setVersion(version);
+    mImplContext->setVersion(mVersion);
     mEventFlag = mImplContext->getStatusEventFlag();
+    mDataMqNotEmptyEf =
+            mVersion >= kReopenSupportedVersion ? kEventFlagDataMqNotEmpty : kEventFlagNotEmpty;
 
     if (specific.has_value()) {
         RETURN_IF_ASTATUS_NOT_OK(setParameterSpecific(specific.value()), "setSpecParamErr");
@@ -231,8 +232,9 @@
 
     mState = State::IDLE;
     mContext->dupeFmq(ret);
-    RETURN_IF(createThread(getEffectName()) != RetCode::SUCCESS, EX_UNSUPPORTED_OPERATION,
-              "FailedToCreateWorker");
+    RETURN_IF(createThread(getEffectNameWithVersion()) != RetCode::SUCCESS,
+              EX_UNSUPPORTED_OPERATION, "FailedToCreateWorker");
+    LOG(INFO) << getEffectNameWithVersion() << __func__;
     return ndk::ScopedAStatus::ok();
 }
 
@@ -242,27 +244,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus DynamicsProcessingImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            return ndk::ScopedAStatus::ok();
-        case CommandId::STOP:
-            mContext->disable();
-            return ndk::ScopedAStatus::ok();
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            return ndk::ScopedAStatus::ok();
-        default:
-            // Need this default handling for vendor extendable CommandId::VENDOR_COMMAND_*
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-}
-
 bool DynamicsProcessingImpl::isParamInRange(const Parameter::Specific& specific) {
     auto& dp = specific.get<Parameter::Specific::dynamicsProcessing>();
     return DynamicsProcessingRanges::isParamInRange(dp, kRanges);
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
index e850ba4..b34cdcf 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessing.h
@@ -36,7 +36,6 @@
     ndk::ScopedAStatus open(const Parameter::Common& common,
                             const std::optional<Parameter::Specific>& specific,
                             OpenEffectReturn* ret) override;
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
index ada301b..fd4e615 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.cpp
@@ -48,10 +48,11 @@
     return RetCode::SUCCESS;
 }
 
-void DynamicsProcessingContext::reset() {
+RetCode DynamicsProcessingContext::reset() {
     if (mDpFreq != nullptr) {
-        mDpFreq.reset();
+        mDpFreq->reset();
     }
+    return RetCode::SUCCESS;
 }
 
 RetCode DynamicsProcessingContext::setCommon(const Parameter::Common& common) {
diff --git a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
index ce657db..15c6811 100644
--- a/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
+++ b/media/libeffects/dynamicsproc/aidl/DynamicsProcessingContext.h
@@ -37,9 +37,9 @@
   public:
     DynamicsProcessingContext(int statusDepth, const Parameter::Common& common);
     ~DynamicsProcessingContext() = default;
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
+    RetCode reset() override;
 
     // override EffectContext::setCommon to update mChannelCount
     RetCode setCommon(const Parameter::Common& common) override;
diff --git a/media/libeffects/hapticgenerator/Android.bp b/media/libeffects/hapticgenerator/Android.bp
index 9975f75..e4ac38e 100644
--- a/media/libeffects/hapticgenerator/Android.bp
+++ b/media/libeffects/hapticgenerator/Android.bp
@@ -37,6 +37,14 @@
     header_libs: [
         "libaudioeffects",
     ],
+    cflags: [
+        // This is needed for the non-zero coefficients optimization for
+        // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
+        // with/without `-ffast-math` for more context.
+        "-ffast-math",
+        "-fhonor-infinities",
+        "-fhonor-nans",
+    ],
     relative_install_path: "soundfx",
 }
 
@@ -59,10 +67,6 @@
         "-O2",
         "-Wall",
         "-Werror",
-        // This is needed for the non-zero coefficients optimization for
-        // BiquadFilter. Try the biquad_filter_benchmark test in audio_utils
-        // with/without `-ffast-math` for more context.
-        "-ffast-math",
         "-fvisibility=hidden",
     ],
 }
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index f60d616..258dca2 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -35,12 +35,15 @@
 #include <audio_utils/format.h>
 #include <audio_utils/safe_math.h>
 #include <system/audio.h>
+#include <system/audio_effects/audio_effects_utils.h>
 
 static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
 static constexpr float DEFAULT_BSF_ZERO_Q = 8.0f;
 static constexpr float DEFAULT_BSF_POLE_Q = 4.0f;
 static constexpr float DEFAULT_DISTORTION_OUTPUT_GAIN = 1.5f;
 
+using android::effect::utils::EffectParamReader;
+
 // This is the only symbol that needs to be exported
 __attribute__ ((visibility ("default")))
 audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
@@ -307,28 +310,40 @@
     return 0;
 }
 
-int HapticGenerator_SetParameter(struct HapticGeneratorContext *context,
-                                 int32_t param,
-                                 uint32_t size,
-                                 void *value) {
-    switch (param) {
+int HapticGenerator_SetParameter(struct HapticGeneratorContext *context, effect_param_t* param) {
+    if (param == nullptr) {
+        ALOGE("%s invalid effect_param_t is nullptr", __func__);
+        return -EINVAL;
+    }
+    int32_t paramType;
+    EffectParamReader reader(*param);
+    reader.readFromParameter(&paramType);
+
+    switch (paramType) {
     case HG_PARAM_HAPTIC_INTENSITY: {
-        if (value == nullptr || size != (uint32_t) (2 * sizeof(int) + sizeof(float))) {
+        if (param->vsize != (sizeof(int32_t) + sizeof(os::HapticScale))) {
+            ALOGE("%s invalid haptic intensity param size %s", __func__, reader.toString().c_str());
             return -EINVAL;
         }
-        const int id = *(int *) value;
-        const os::HapticLevel hapticLevel = static_cast<os::HapticLevel>(*((int *) value + 1));
-        const float adaptiveScaleFactor = (*((float *) value + 2));
-        const os::HapticScale hapticScale = {hapticLevel, adaptiveScaleFactor};
-        ALOGD("Updating haptic scale, hapticLevel=%d, adaptiveScaleFactor=%f",
-              static_cast<int>(hapticLevel), adaptiveScaleFactor);
+        int32_t id, scaleLevel;
+        float scaleFactor, adaptiveScaleFactor;
+        if (reader.readFromValue(&id) != OK || reader.readFromValue(&scaleLevel) != OK ||
+            reader.readFromValue(&scaleFactor) != OK ||
+            reader.readFromValue(&adaptiveScaleFactor) != OK) {
+            ALOGE("%s error reading haptic intensity %s", __func__, reader.toString().c_str());
+            return -EINVAL;
+        }
+        os::HapticScale hapticScale(static_cast<os::HapticLevel>(scaleLevel), scaleFactor,
+                                    adaptiveScaleFactor);
+        ALOGD("Updating haptic scale, %s", hapticScale.toString().c_str());
         if (hapticScale.isScaleMute()) {
             context->param.id2HapticScale.erase(id);
         } else {
             context->param.id2HapticScale.emplace(id, hapticScale);
         }
         context->param.maxHapticScale = hapticScale;
-        for (const auto&[id, scale] : context->param.id2HapticScale) {
+        for (const auto&[_, scale] : context->param.id2HapticScale) {
+            // TODO(b/360314386): update to use new scale factors
             if (scale.getLevel() > context->param.maxHapticScale.getLevel()) {
                 context->param.maxHapticScale = scale;
             }
@@ -336,12 +351,17 @@
         break;
     }
     case HG_PARAM_VIBRATOR_INFO: {
-        if (value == nullptr || size != 3 * sizeof(float)) {
+        if (param->vsize != (3 * sizeof(float))) {
+            ALOGE("%s invalid vibrator info param size %s", __func__, reader.toString().c_str());
             return -EINVAL;
         }
-        const float resonantFrequency = *(float*) value;
-        const float qFactor = *((float *) value + 1);
-        const float maxAmplitude = *((float *) value + 2);
+        float resonantFrequency, qFactor, maxAmplitude;
+        if (reader.readFromValue(&resonantFrequency) != OK ||
+            reader.readFromValue(&qFactor) != OK ||
+            reader.readFromValue(&maxAmplitude) != OK) {
+            ALOGE("%s error reading vibrator info %s", __func__, reader.toString().c_str());
+            return -EINVAL;
+        }
         context->param.resonantFrequency =
                 audio_utils::safe_isnan(resonantFrequency) ? DEFAULT_RESONANT_FREQUENCY
                                                            : resonantFrequency;
@@ -369,7 +389,7 @@
         HapticGenerator_Reset(context);
     } break;
     default:
-        ALOGW("Unknown param: %d", param);
+        ALOGW("Unknown param: %d", paramType);
         return -EINVAL;
     }
 
@@ -573,8 +593,7 @@
                 return -EINVAL;
             }
             effect_param_t *cmd = (effect_param_t *) cmdData;
-            *(int *) replyData = HapticGenerator_SetParameter(
-                    context, *(int32_t *) cmd->data, cmd->vsize, cmd->data + sizeof(int32_t));
+            *(int *) replyData = HapticGenerator_SetParameter(context, cmd);
         }
             break;
 
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
index b803ee4..dd29e86 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.cpp
@@ -55,6 +55,17 @@
 
 namespace aidl::android::hardware::audio::effect {
 
+const std::vector<Range::HapticGeneratorRange> kHapticRange = {
+        MAKE_RANGE(HapticGenerator, vibratorInfo,
+                   HapticGenerator::VibratorInformation(
+                           {.resonantFrequencyHz = 1, .qFactor = 1, .maxAmplitude = -1}),
+                   HapticGenerator::VibratorInformation(
+                           {.resonantFrequencyHz = std::numeric_limits<float>::max(),
+                            .qFactor = std::numeric_limits<float>::max(),
+                            .maxAmplitude = 1}))};
+
+static const Capability kHapticCap = {.range = kHapticRange};
+
 const std::string HapticGeneratorImpl::kEffectName = "Haptic Generator";
 const Descriptor HapticGeneratorImpl::kDescriptor = {
         .common = {.id = {.type = getEffectTypeUuidHapticGenerator(),
@@ -62,7 +73,8 @@
                           .proxy = std::nullopt},
                    .flags = {.type = Flags::Type::INSERT, .insert = Flags::Insert::FIRST},
                    .name = HapticGeneratorImpl::kEffectName,
-                   .implementor = "The Android Open Source Project"}};
+                   .implementor = "The Android Open Source Project"},
+        .capability = kHapticCap};
 
 ndk::ScopedAStatus HapticGeneratorImpl::getDescriptor(Descriptor* _aidl_return) {
     RETURN_IF(!_aidl_return, EX_ILLEGAL_ARGUMENT, "Parameter:nullptr");
@@ -70,32 +82,14 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus HapticGeneratorImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->reset();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus HapticGeneratorImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::hapticGenerator != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
     RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
 
     auto& hgParam = specific.get<Parameter::Specific::hapticGenerator>();
+    RETURN_IF(!inRange(hgParam, kHapticRange), EX_ILLEGAL_ARGUMENT, "outOfRange");
+
     auto tag = hgParam.getTag();
 
     switch (tag) {
diff --git a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
index a775f06..8bae024 100644
--- a/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
+++ b/media/libeffects/hapticgenerator/aidl/EffectHapticGenerator.h
@@ -30,7 +30,6 @@
     HapticGeneratorImpl() = default;
     ~HapticGeneratorImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index 0a04250..d8f9093 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -14,30 +14,47 @@
  * limitations under the License.
  */
 
-#include <cstddef>
 #define LOG_TAG "AHAL_HapticGeneratorContext"
 
-#include <Utils.h>
+#include "HapticGeneratorContext.h"
 #include <android-base/logging.h>
 #include <android-base/parsedouble.h>
 #include <android-base/properties.h>
+#include <audio_utils/primitives.h>
+#include <audio_utils/safe_math.h>
+#include <Utils.h>
 
-#include "HapticGeneratorContext.h"
+#include <cstddef>
+
+using aidl::android::hardware::audio::common::getChannelCount;
+using aidl::android::hardware::audio::common::getPcmSampleSizeInBytes;
+using aidl::android::media::audio::common::AudioChannelLayout;
 
 namespace aidl::android::hardware::audio::effect {
 
 HapticGeneratorContext::HapticGeneratorContext(int statusDepth, const Parameter::Common& common)
     : EffectContext(statusDepth, common) {
     mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
-    mSampleRate = common.input.base.sampleRate;
-    mFrameCount = common.input.frameCount;
-    init_params(common.input.base.channelMask, common.output.base.channelMask);
+
+    mParams.mMaxHapticScale = {.scale = HapticGenerator::VibratorScale::MUTE};
+    mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
+    mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+    mParams.mVibratorInfo.maxAmplitude = 0.f;
+
+    init_params(common);
+    mState = HAPTIC_GENERATOR_STATE_INITIALIZED;
 }
 
 HapticGeneratorContext::~HapticGeneratorContext() {
     mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
 }
 
+// Override EffectContext::setCommon because HapticGenerator needs to re-run init_params
+RetCode HapticGeneratorContext::setCommon(const Parameter::Common& common) {
+    init_params(common);
+    return EffectContext::setCommon(common);
+}
+
 RetCode HapticGeneratorContext::enable() {
     if (mState != HAPTIC_GENERATOR_STATE_INITIALIZED) {
         return RetCode::ERROR_EFFECT_LIB_ERROR;
@@ -54,7 +71,7 @@
     return RetCode::SUCCESS;
 }
 
-void HapticGeneratorContext::reset() {
+RetCode HapticGeneratorContext::reset() {
     for (auto& filter : mProcessorsRecord.filters) {
         filter->clear();
     }
@@ -64,28 +81,34 @@
     for (auto& distortion : mProcessorsRecord.distortions) {
         distortion->clear();
     }
+    return RetCode::SUCCESS;
 }
 
 RetCode HapticGeneratorContext::setHgHapticScales(
         const std::vector<HapticGenerator::HapticScale>& hapticScales) {
     for (auto hapticScale : hapticScales) {
-        mParams.mHapticScales.insert_or_assign(hapticScale.id, hapticScale.scale);
+        mParams.mHapticScales.insert_or_assign(hapticScale.id, hapticScale);
     }
-    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
+    mParams.mMaxHapticScale = {.scale = HapticGenerator::VibratorScale::MUTE};
     for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
-        mParams.mMaxVibratorScale = std::max(mParams.mMaxVibratorScale, vibratorScale);
+        // TODO(b/360314386): update to use new scale factors
+        if (vibratorScale.scale > mParams.mMaxHapticScale.scale) {
+            mParams.mMaxHapticScale = vibratorScale;
+        }
     }
+    LOG(INFO) << " HapticGenerator VibratorScale set to "
+              << toString(mParams.mMaxHapticScale.scale);
     return RetCode::SUCCESS;
 }
 
-HapticGenerator::VibratorInformation HapticGeneratorContext::getHgVibratorInformation() {
+HapticGenerator::VibratorInformation HapticGeneratorContext::getHgVibratorInformation() const {
     return mParams.mVibratorInfo;
 }
 
-std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() {
+std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() const {
     std::vector<HapticGenerator::HapticScale> result;
-    for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
-        result.push_back({id, vibratorScale});
+    for (const auto& [_, hapticScale] : mParams.mHapticScales) {
+        result.push_back(hapticScale);
     }
     return result;
 }
@@ -93,6 +116,15 @@
 RetCode HapticGeneratorContext::setHgVibratorInformation(
         const HapticGenerator::VibratorInformation& vibratorInfo) {
     mParams.mVibratorInfo = vibratorInfo;
+    if (::android::audio_utils::safe_isnan(mParams.mVibratorInfo.resonantFrequencyHz)) {
+        LOG(WARNING) << __func__ << " resonantFrequencyHz reset from nan to "
+                     << DEFAULT_RESONANT_FREQUENCY;
+        mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
+    }
+    if (::android::audio_utils::safe_isnan(mParams.mVibratorInfo.qFactor)) {
+        LOG(WARNING) << __func__ << " qFactor reset from nan to " << DEFAULT_BSF_ZERO_Q;
+        mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+    }
 
     if (mProcessorsRecord.bpf != nullptr) {
         mProcessorsRecord.bpf->setCoefficients(::android::audio_effect::haptic_generator::bpfCoefs(
@@ -117,25 +149,19 @@
     auto frameSize = getInputFrameSize();
     RETURN_VALUE_IF(0 == frameSize, status, "zeroFrameSize");
 
-    // The audio data must not be modified but just written to
-    // output buffer according the access mode.
-    if (in != out) {
-        for (int i = 0; i < samples; i++) {
-            out[i] = in[i];
-        }
-    }
-
     if (mState != HAPTIC_GENERATOR_STATE_ACTIVE) {
+        LOG(WARNING) << " HapticGenerator in wrong state " << mState;
         return status;
     }
 
-    if (mParams.mMaxVibratorScale == HapticGenerator::VibratorScale::MUTE) {
+    if (mParams.mMaxHapticScale.scale == HapticGenerator::VibratorScale::MUTE) {
         // Haptic channels are muted, not need to generate haptic data.
         return {STATUS_OK, samples, samples};
     }
 
     // Resize buffer if the haptic sample count is greater than buffer size.
-    size_t hapticSampleCount = mFrameCount * mParams.mHapticChannelCount;
+    const size_t hapticSampleCount = mFrameCount * mParams.mHapticChannelCount;
+    const size_t audioSampleCount = mFrameCount * mParams.mAudioChannelCount;
     if (hapticSampleCount > mInputBuffer.size()) {
         // The inputBuffer and outputBuffer must have the same size, which must be at least
         // the haptic sample count.
@@ -155,45 +181,48 @@
             runProcessingChain(mInputBuffer.data(), mOutputBuffer.data(), mFrameCount);
     ::android::os::scaleHapticData(
             hapticOutBuffer, hapticSampleCount,
-            {/*level=*/static_cast<::android::os::HapticLevel>(mParams.mMaxVibratorScale) },
-            mParams.mVibratorInfo.qFactor);
+            ::android::os::HapticScale(
+                    static_cast<::android::os::HapticLevel>(mParams.mMaxHapticScale.scale),
+                    mParams.mMaxHapticScale.scaleFactor,
+                    mParams.mMaxHapticScale.adaptiveScaleFactor),
+            mParams.mVibratorInfo.maxAmplitude /* limit */);
 
     // For haptic data, the haptic playback thread will copy the data from effect input
     // buffer, which contains haptic data at the end of the buffer, directly to sink buffer.
-    // In that case, copy haptic data to input buffer instead of output buffer.
-    // Note: this may not work with rpc/binder calls
-    for (size_t i = 0; i < hapticSampleCount; ++i) {
-        in[samples + i] = hapticOutBuffer[i];
-    }
-    return {STATUS_OK, samples, static_cast<int32_t>(samples + hapticSampleCount)};
+    // In AIDL, only the output buffer is sent back to the audio framework via FMQ. Here the effect
+    // copies the generated haptic data to the target position in the output buffer, and the
+    // framework then appends it at the same position of the input buffer.
+    memcpy_to_float_from_float_with_clamping(out + audioSampleCount, hapticOutBuffer,
+                                             hapticSampleCount, 2.f /* absMax */);
+    return {STATUS_OK, samples, samples};
 }
 
-void HapticGeneratorContext::init_params(media::audio::common::AudioChannelLayout inputChMask,
-                                         media::audio::common::AudioChannelLayout outputChMask) {
-    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
-    mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
-    mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
+void HapticGeneratorContext::init_params(const Parameter::Common& common) {
+    mSampleRate = common.input.base.sampleRate;
+    mFrameCount = common.input.frameCount;
 
     mParams.mAudioChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
-            inputChMask, ~media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
+            common.input.base.channelMask,
+            ~media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
     mParams.mHapticChannelCount = ::aidl::android::hardware::audio::common::getChannelCount(
-            outputChMask, media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
+            common.output.base.channelMask,
+            media::audio::common::AudioChannelLayout::LAYOUT_HAPTIC_AB);
     LOG_ALWAYS_FATAL_IF(mParams.mHapticChannelCount > 2, "haptic channel count is too large");
     for (int i = 0; i < mParams.mHapticChannelCount; ++i) {
         // By default, use the first audio channel to generate haptic channels.
         mParams.mHapticChannelSource[i] = 0;
     }
-
-    mState = HAPTIC_GENERATOR_STATE_INITIALIZED;
+    configure();
+    LOG(DEBUG) << " HapticGenerator init context:\n" << contextToString();
 }
 
-float HapticGeneratorContext::getDistortionOutputGain() {
+float HapticGeneratorContext::getDistortionOutputGain() const {
     float distortionOutputGain = getFloatProperty(
             "vendor.audio.hapticgenerator.distortion.output.gain", DEFAULT_DISTORTION_OUTPUT_GAIN);
     return distortionOutputGain;
 }
 
-float HapticGeneratorContext::getFloatProperty(const std::string& key, float defaultValue) {
+float HapticGeneratorContext::getFloatProperty(const std::string& key, float defaultValue) const {
     float result;
     std::string value = ::android::base::GetProperty(key, "");
     if (!value.empty() && ::android::base::ParseFloat(value, &result)) {
@@ -322,4 +351,35 @@
     return in;
 }
 
+std::string HapticGeneratorContext::paramToString(const struct HapticGeneratorParam& param) const {
+    std::stringstream ss;
+    ss << "\t\tHapticGenerator Parameters:\n";
+    ss << "\t\t- mHapticChannelCount: " << param.mHapticChannelCount << '\n';
+    ss << "\t\t- mAudioChannelCount: " << param.mAudioChannelCount << '\n';
+    ss << "\t\t- mHapticChannelSource: " << param.mHapticChannelSource[0] << ", "
+       << param.mHapticChannelSource[1] << '\n';
+    ss << "\t\t- mMaxHapticScale: " << ::android::internal::ToString(param.mMaxHapticScale.scale)
+       << ", scaleFactor=" << param.mMaxHapticScale.scaleFactor
+       << ", adaptiveScaleFactor=" << param.mMaxHapticScale.adaptiveScaleFactor << '\n';
+    ss << "\t\t- mVibratorInfo: " << param.mVibratorInfo.toString() << '\n';
+    for (const auto& it : param.mHapticScales)
+        ss << "\t\t\t" << it.first << ": " << toString(it.second.scale) << '\n';
+
+    return ss.str();
+}
+
+std::string HapticGeneratorContext::contextToString() const {
+    std::stringstream ss;
+    ss << "\t\tHapticGenerator Context:\n";
+    ss << "\t\t- state: " << mState << '\n';
+    ss << "\t\t- bpf Q: " << DEFAULT_BPF_Q << '\n';
+    ss << "\t\t- slow env normalization power: " << DEFAULT_SLOW_ENV_NORMALIZATION_POWER << '\n';
+    ss << "\t\t- distortion corner frequency: " << DEFAULT_DISTORTION_CORNER_FREQUENCY << '\n';
+    ss << "\t\t- distortion input gain: " << DEFAULT_DISTORTION_INPUT_GAIN << '\n';
+    ss << "\t\t- distortion cube threshold: " << DEFAULT_DISTORTION_CUBE_THRESHOLD << '\n';
+    ss << "\t\t- distortion output gain: " << getDistortionOutputGain() << '\n';
+    ss << paramToString(mParams) << "\n";
+    return ss.str();
+}
+
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index 3a2ad1c..37532f6 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -16,11 +16,13 @@
 
 #pragma once
 
-#include <vibrator/ExternalVibrationUtils.h>
-#include <map>
-
-#include "Processors.h"
 #include "effect-impl/EffectContext.h"
+#include "Processors.h"
+
+#include <vibrator/ExternalVibrationUtils.h>
+
+#include <cstddef>
+#include <map>
 
 namespace aidl::android::hardware::audio::effect {
 
@@ -39,10 +41,9 @@
     int mHapticChannelCount;
     int mAudioChannelCount;
 
-    HapticGenerator::HapticScale mHapticScale;
-    std::map<int, HapticGenerator::VibratorScale> mHapticScales;
+    std::map<int, HapticGenerator::HapticScale> mHapticScales;
     // max intensity will be used to scale haptic data.
-    HapticGenerator::VibratorScale mMaxVibratorScale;
+    HapticGenerator::HapticScale mMaxHapticScale;
 
     HapticGenerator::VibratorInformation mVibratorInfo;
 };
@@ -64,18 +65,20 @@
   public:
     HapticGeneratorContext(int statusDepth, const Parameter::Common& common);
     ~HapticGeneratorContext();
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
+    RetCode reset() override;
 
     RetCode setHgHapticScales(const std::vector<HapticGenerator::HapticScale>& hapticScales);
-    std::vector<HapticGenerator::HapticScale> getHgHapticScales();
+    std::vector<HapticGenerator::HapticScale> getHgHapticScales() const;
 
     RetCode setHgVibratorInformation(const HapticGenerator::VibratorInformation& vibratorInfo);
-    HapticGenerator::VibratorInformation getHgVibratorInformation();
+    HapticGenerator::VibratorInformation getHgVibratorInformation() const;
 
     IEffect::Status process(float* in, float* out, int samples);
 
+    RetCode setCommon(const Parameter::Common& common) override;
+
   private:
     static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
     static constexpr float DEFAULT_BSF_ZERO_Q = 8.0f;
@@ -108,15 +111,17 @@
     // intermediate buffer in the generating algorithm.
     std::vector<float> mOutputBuffer;
 
-    void init_params(media::audio::common::AudioChannelLayout inputChMask,
-                     media::audio::common::AudioChannelLayout outputChMask);
+    void init_params(const Parameter::Common& common);
     void configure();
 
-    float getDistortionOutputGain();
-    float getFloatProperty(const std::string& key, float defaultValue);
+    float getDistortionOutputGain() const;
+    float getFloatProperty(const std::string& key, float defaultValue) const;
     void addBiquadFilter(std::shared_ptr<HapticBiquadFilter> filter);
     void buildProcessingChain();
     float* runProcessingChain(float* buf1, float* buf2, size_t frameCount);
+
+    std::string paramToString(const struct HapticGeneratorParam& param) const;
+    std::string contextToString() const;
 };
 
 }  // namespace aidl::android::hardware::audio::effect
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
index f89606e..592fd60 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.cpp
@@ -70,27 +70,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus LoudnessEnhancerImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus LoudnessEnhancerImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::loudnessEnhancer != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
diff --git a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
index 98bdc6b..1e050f3 100644
--- a/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
+++ b/media/libeffects/loudness/aidl/EffectLoudnessEnhancer.h
@@ -30,7 +30,6 @@
     LoudnessEnhancerImpl() = default;
     ~LoudnessEnhancerImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
index d8bcfc0..ac8b14a 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.cpp
@@ -43,17 +43,13 @@
     return RetCode::SUCCESS;
 }
 
-void LoudnessEnhancerContext::reset() {
-    float targetAmp = pow(10, mGain / 2000.0f);  // mB to linear amplification
+RetCode LoudnessEnhancerContext::setLeGain(int gainMb) {
+    float targetAmp = pow(10, gainMb / 2000.0f);  // mB to linear amplification
     if (mCompressor != nullptr) {
         // Get samplingRate from input
         mCompressor->Initialize(targetAmp, mCommon.input.base.sampleRate);
     }
-}
-
-RetCode LoudnessEnhancerContext::setLeGain(int gainMb) {
     mGain = gainMb;
-    reset();  // apply parameter update
     return RetCode::SUCCESS;
 }
 
diff --git a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
index 192b212..67ccd24 100644
--- a/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
+++ b/media/libeffects/loudness/aidl/LoudnessEnhancerContext.h
@@ -34,9 +34,8 @@
     LoudnessEnhancerContext(int statusDepth, const Parameter::Common& common);
     ~LoudnessEnhancerContext() = default;
 
-    RetCode enable();
-    RetCode disable();
-    void reset();
+    RetCode enable() override;
+    RetCode disable() override;
 
     RetCode setLeGain(int gainMb);
     int getLeGain() const { return mGain; }
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index fff2feb..d5e3cf7 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -185,7 +185,6 @@
     }
     RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
                     RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
-    mEnabled = false;
     return limitLevel();
 }
 
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
index 044c8dd..e5ab40d 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.h
@@ -35,9 +35,9 @@
     void deInit();
     lvm::BundleEffectType getBundleType() const { return mType; }
 
-    RetCode enable();
+    RetCode enable() override;
     RetCode enableOperatingMode();
-    RetCode disable();
+    RetCode disable() override;
     RetCode disableOperatingMode();
 
     bool isDeviceSupportedBassBoost(
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
index daabdb7..e5373f3 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleTypes.h
@@ -65,9 +65,9 @@
          {5, 3, -1, 3, 5}}}; /* Rock Preset */
 
 static const std::vector<Equalizer::Preset> kEqPresets = {
-        {0, "Normal"},      {1, "Classical"}, {2, "Dance"}, {3, "Flat"}, {4, "Folk"},
-        {5, "Heavy Metal"}, {6, "Hip Hop"},   {7, "Jazz"},  {8, "Pop"},  {9, "Rock"}};
-
+        {-1, "Custom"}, {0, "Normal"}, {1, "Classical"},   {2, "Dance"},
+        {3, "Flat"},    {4, "Folk"},   {5, "Heavy Metal"}, {6, "Hip Hop"},
+        {7, "Jazz"},    {8, "Pop"},    {9, "Rock"}};
 
 const std::vector<Range::EqualizerRange> kEqRanges = {
         MAKE_RANGE(Equalizer, preset, 0, MAX_NUM_PRESETS - 1),
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
index 70c276d..2a81673 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.cpp
@@ -428,27 +428,6 @@
     return RetCode::SUCCESS;
 }
 
-ndk::ScopedAStatus EffectBundleAidl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 // Processing method running in EffectWorker thread.
 IEffect::Status EffectBundleAidl::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
diff --git a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
index 429e941..479579b 100644
--- a/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
+++ b/media/libeffects/lvm/wrapper/Aidl/EffectBundleAidl.h
@@ -49,8 +49,6 @@
     IEffect::Status effectProcessImpl(float* in, float* out, int samples)
             REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
-
     std::string getEffectName() override { return *mEffectName; }
 
   private:
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
index 4d369b1..201c659 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.cpp
@@ -361,27 +361,6 @@
     return RetCode::SUCCESS;
 }
 
-ndk::ScopedAStatus EffectReverb::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 // Processing method running in EffectWorker thread.
 IEffect::Status EffectReverb::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
index e0771a1..4acac1d 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/EffectReverb.h
@@ -42,8 +42,6 @@
     IEffect::Status effectProcessImpl(float* in, float* out, int samples)
             REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
-
     std::string getEffectName() override { return *mEffectName; }
 
   private:
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
index 44ea2a4..3ae3edc 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.cpp
@@ -19,6 +19,7 @@
 #define LOG_TAG "ReverbContext"
 #include <android-base/logging.h>
 #include <Utils.h>
+#include <audio_utils/primitives.h>
 
 #include "ReverbContext.h"
 #include "VectorArithmetic.h"
@@ -347,6 +348,15 @@
             mCommon.output.base.channelMask);
     int frameCount = mCommon.input.frameCount;
 
+    if (mBypass) {
+        if (isAuxiliary()) {
+            memset(out, 0, getOutputFrameSize() * frameCount);
+        } else {
+            memcpy_to_float_from_float_with_clamping(out, in, samples, 1);
+        }
+        return {STATUS_OK, samples, outChannels * frameCount};
+    }
+
     // Reverb only affects the stereo channels in a multichannel source.
     if (channels < 1 || channels > LVM_MAX_CHANNELS) {
         LOG(ERROR) << __func__ << " process invalid PCM channels " << channels;
diff --git a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
index 44391f2..f55eac5 100644
--- a/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
+++ b/media/libeffects/lvm/wrapper/Reverb/aidl/ReverbContext.h
@@ -51,8 +51,8 @@
     RetCode init();
     void deInit();
 
-    RetCode enable();
-    RetCode disable();
+    RetCode enable() override;
+    RetCode disable() override;
 
     bool isAuxiliary();
     bool isPreset();
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
index 87d267b..4bc34e7 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.cpp
@@ -417,27 +417,6 @@
     return RetCode::SUCCESS;
 }
 
-ndk::ScopedAStatus EffectPreProcessing::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 // Processing method running in EffectWorker thread.
 IEffect::Status EffectPreProcessing::effectProcessImpl(float* in, float* out, int sampleToProcess) {
     IEffect::Status status = {EX_NULL_POINTER, 0, 0};
diff --git a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
index 9ce5597..31f5737 100644
--- a/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
+++ b/media/libeffects/preprocessing/aidl/EffectPreProcessing.h
@@ -43,8 +43,6 @@
     IEffect::Status effectProcessImpl(float* in, float* out, int samples)
             REQUIRES(mImplMutex) override;
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
-
     std::string getEffectName() override { return *mEffectName; }
 
   private:
diff --git a/media/libeffects/preprocessing/aidl/PreProcessingContext.h b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
index 11a2bea..1b9b77b 100644
--- a/media/libeffects/preprocessing/aidl/PreProcessingContext.h
+++ b/media/libeffects/preprocessing/aidl/PreProcessingContext.h
@@ -45,8 +45,8 @@
 
     PreProcessingEffectType getPreProcessingType() const { return mType; }
 
-    RetCode enable();
-    RetCode disable();
+    RetCode enable() override;
+    RetCode disable() override;
 
     RetCode setCommon(const Parameter::Common& common) override;
     void updateConfigs(const Parameter::Common& common);
diff --git a/media/libeffects/visualizer/aidl/Visualizer.cpp b/media/libeffects/visualizer/aidl/Visualizer.cpp
index 9b493d4..f4b9b25 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.cpp
+++ b/media/libeffects/visualizer/aidl/Visualizer.cpp
@@ -85,27 +85,6 @@
     return ndk::ScopedAStatus::ok();
 }
 
-ndk::ScopedAStatus VisualizerImpl::commandImpl(CommandId command) {
-    RETURN_IF(!mContext, EX_NULL_POINTER, "nullContext");
-    switch (command) {
-        case CommandId::START:
-            mContext->enable();
-            break;
-        case CommandId::STOP:
-            mContext->disable();
-            break;
-        case CommandId::RESET:
-            mContext->disable();
-            mContext->resetBuffer();
-            break;
-        default:
-            LOG(ERROR) << __func__ << " commandId " << toString(command) << " not supported";
-            return ndk::ScopedAStatus::fromExceptionCodeWithMessage(EX_ILLEGAL_ARGUMENT,
-                                                                    "commandIdNotSupported");
-    }
-    return ndk::ScopedAStatus::ok();
-}
-
 ndk::ScopedAStatus VisualizerImpl::setParameterSpecific(const Parameter::Specific& specific) {
     RETURN_IF(Parameter::Specific::visualizer != specific.getTag(), EX_ILLEGAL_ARGUMENT,
               "EffectNotSupported");
@@ -222,6 +201,7 @@
 RetCode VisualizerImpl::releaseContext() {
     if (mContext) {
         mContext->disable();
+        mContext->reset();
         mContext->resetBuffer();
     }
     return RetCode::SUCCESS;
diff --git a/media/libeffects/visualizer/aidl/Visualizer.h b/media/libeffects/visualizer/aidl/Visualizer.h
index 3180972..f25b78d 100644
--- a/media/libeffects/visualizer/aidl/Visualizer.h
+++ b/media/libeffects/visualizer/aidl/Visualizer.h
@@ -32,7 +32,6 @@
     VisualizerImpl() = default;
     ~VisualizerImpl() { cleanUp(); }
 
-    ndk::ScopedAStatus commandImpl(CommandId command) REQUIRES(mImplMutex) override;
     ndk::ScopedAStatus getDescriptor(Descriptor* _aidl_return) override;
     ndk::ScopedAStatus setParameterSpecific(const Parameter::Specific& specific)
             REQUIRES(mImplMutex) override;
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.cpp b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
index 1e08674..a368e52 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.cpp
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.cpp
@@ -57,7 +57,7 @@
 #endif
     mChannelCount = channelCount;
     mCommon = common;
-    std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
+    reset();
     return RetCode::SUCCESS;
 }
 
@@ -77,8 +77,9 @@
     return RetCode::SUCCESS;
 }
 
-void VisualizerContext::reset() {
+RetCode VisualizerContext::reset() {
     std::fill(mCaptureBuf.begin(), mCaptureBuf.end(), 0x80);
+    return RetCode::SUCCESS;
 }
 
 RetCode VisualizerContext::setCaptureSamples(int samples) {
@@ -109,7 +110,6 @@
     mDownstreamLatency = latency;
     return RetCode::SUCCESS;
 }
-
 int VisualizerContext::getDownstreamLatency() {
     return mDownstreamLatency;
 }
diff --git a/media/libeffects/visualizer/aidl/VisualizerContext.h b/media/libeffects/visualizer/aidl/VisualizerContext.h
index 9715e20..d4abbd3 100644
--- a/media/libeffects/visualizer/aidl/VisualizerContext.h
+++ b/media/libeffects/visualizer/aidl/VisualizerContext.h
@@ -36,10 +36,10 @@
 
     RetCode initParams(const Parameter::Common& common);
 
-    RetCode enable();
-    RetCode disable();
+    RetCode enable() override;
+    RetCode disable() override;
     // keep all parameters and reset buffer.
-    void reset();
+    RetCode reset() override;
 
     RetCode setCaptureSamples(int32_t captureSize);
     int32_t getCaptureSamples();
diff --git a/media/liberror/include/error/BinderResult.h b/media/liberror/include/error/BinderResult.h
new file mode 100644
index 0000000..1f1211c
--- /dev/null
+++ b/media/liberror/include/error/BinderResult.h
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#pragma once
+
+#include <binder/Status.h>
+#include <error/expected_utils.h>
+#include <utils/Errors.h>
+
+namespace android {
+namespace error {
+
+/**
+ * A convenience shorthand for base::expected, where the error type is a binder::Status, for use
+ * when implementing binder services.
+ * Clients need to link against libbinder, since this library is header-only.
+ */
+template <typename T>
+using BinderResult = base::expected<T, binder::Status>;
+
+inline base::unexpected<binder::Status> unexpectedExceptionCode(int32_t exceptionCode,
+                                                                const char* s) {
+    return base::unexpected{binder::Status::fromExceptionCode(exceptionCode, s)};
+}
+
+inline base::unexpected<binder::Status> unexpectedServiceException(int32_t serviceSpecificCode,
+                                                                   const char* s) {
+    return base::unexpected{binder::Status::fromServiceSpecificError(serviceSpecificCode, s)};
+}
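+
+// Illustrative only (not part of this change): a hypothetical helper could return
+// BinderResult and build its error with the functions above, e.g.
+//
+//   BinderResult<int32_t> parseVolumeIndex(const std::string& s) {
+//       if (s.empty()) {
+//           return unexpectedExceptionCode(binder::Status::EX_ILLEGAL_ARGUMENT,
+//                                          "empty volume index");
+//       }
+//       return std::atoi(s.c_str());
+//   }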
+
+}  // namespace error
+}  // namespace android
+
+inline std::string errorToString(const ::android::binder::Status& status) {
+    return std::string{status.toString8().c_str()};
+}
diff --git a/media/liberror/include/error/BinderStatusMatcher.h b/media/liberror/include/error/BinderStatusMatcher.h
new file mode 100644
index 0000000..11d9e65
--- /dev/null
+++ b/media/liberror/include/error/BinderStatusMatcher.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <ostream>
+
+#include <binder/Status.h>
+
+namespace android::error {
+
+class BinderStatusMatcher {
+  public:
+    using is_gtest_matcher = void;
+
+    explicit BinderStatusMatcher(binder::Status status) : status_(std::move(status)) {}
+
+    static BinderStatusMatcher hasException(binder::Status::Exception ex) {
+        return BinderStatusMatcher(binder::Status::fromExceptionCode(ex));
+    }
+
+    static BinderStatusMatcher isOk() { return BinderStatusMatcher(binder::Status::ok()); }
+
+    bool MatchAndExplain(const binder::Status& value,
+                         ::testing::MatchResultListener* listener) const {
+        if (status_.exceptionCode() == value.exceptionCode() &&
+            status_.transactionError() == value.transactionError() &&
+            status_.serviceSpecificErrorCode() == value.serviceSpecificErrorCode()) {
+            return true;
+        }
+        *listener << "received binder status: " << value;
+        return false;
+    }
+
+    void DescribeTo(std::ostream* os) const { *os << "contains binder status " << status_; }
+
+    void DescribeNegationTo(std::ostream* os) const {
+        *os << "does not contain binder status " << status_;
+    }
+
+  private:
+    const binder::Status status_;
+};
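+
+// Illustrative usage only (not part of this change; `service` and its methods are
+// hypothetical), e.g. in a gtest:
+//   EXPECT_THAT(service->doSomething(), BinderStatusMatcher::isOk());
+//   EXPECT_THAT(service->doSomethingInvalid(),
+//               BinderStatusMatcher::hasException(binder::Status::EX_ILLEGAL_ARGUMENT));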
+}  // namespace android::error
diff --git a/media/liberror/include/error/ExpectedMatchers.h b/media/liberror/include/error/ExpectedMatchers.h
new file mode 100644
index 0000000..b81adbf
--- /dev/null
+++ b/media/liberror/include/error/ExpectedMatchers.h
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <ostream>
+#include <type_traits>
+
+namespace android::error {
+
+/**
+ * Example Usage:
+ * Given a function with signature
+ *       Result<T, U> foo()
+ * Matchers can be used as follows:
+ *       EXPECT_THAT(foo(), IsOkAnd(Eq(T{})));
+ *       EXPECT_THAT(foo(), IsErrorAnd(Eq(U{})));
+ */
+template <typename ExpectedT>
+class IsOkAndImpl : public ::testing::MatcherInterface<ExpectedT> {
+  public:
+    using ValueT = typename std::remove_reference_t<ExpectedT>::value_type;
+
+    template <typename InnerMatcher>
+    explicit IsOkAndImpl(InnerMatcher innerMatcher)
+        : inner_matcher_(::testing::SafeMatcherCast<const ValueT&>(
+                  std::forward<InnerMatcher>(innerMatcher))) {}
+
+    bool MatchAndExplain(ExpectedT val, ::testing::MatchResultListener* listener) const {
+        if (!val.has_value()) {
+            *listener << "which has error " << ::testing::PrintToString(val.error());
+            return false;
+        }
+        const auto res = inner_matcher_.MatchAndExplain(val.value(), listener);
+        if (!res) {
+            *listener << "which has value " << ::testing::PrintToString(val.value());
+        }
+        return res;
+    }
+
+    void DescribeTo(std::ostream* os) const {
+        *os << "contains expected value which ";
+        inner_matcher_.DescribeTo(os);
+    }
+
+    void DescribeNegationTo(std::ostream* os) const {
+        *os << "does not contain expected, or contains expected value which ";
+        inner_matcher_.DescribeNegationTo(os);
+    }
+
+  private:
+    ::testing::Matcher<const ValueT&> inner_matcher_;
+};
+
+template <typename InnerMatcher>
+class IsOkAnd {
+  public:
+    explicit IsOkAnd(InnerMatcher innerMatcher) : inner_matcher_(std::move(innerMatcher)) {}
+
+    template <typename T>
+    operator ::testing::Matcher<T>() const {
+        return ::testing::Matcher<T>{new IsOkAndImpl<const T&>(inner_matcher_)};
+    }
+
+  private:
+    InnerMatcher inner_matcher_;
+};
+
+template <typename ExpectedT>
+class IsErrorAndImpl : public ::testing::MatcherInterface<ExpectedT> {
+  public:
+    using ErrorT = typename std::remove_reference_t<ExpectedT>::error_type;
+
+    template <typename InnerMatcher>
+    explicit IsErrorAndImpl(InnerMatcher innerMatcher)
+        : inner_matcher_(::testing::SafeMatcherCast<const ErrorT&>(
+                  std::forward<InnerMatcher>(innerMatcher))) {}
+
+    bool MatchAndExplain(ExpectedT val, ::testing::MatchResultListener* listener) const {
+        if (val.has_value()) {
+            *listener << "which has value " << ::testing::PrintToString(val.value());
+            return false;
+        }
+
+        const auto res = inner_matcher_.MatchAndExplain(val.error(), listener);
+        if (!res) {
+            *listener << "which has error " << ::testing::PrintToString(val.error());
+        }
+        return res;
+    }
+
+    void DescribeTo(std::ostream* os) const {
+        *os << "contains error value which ";
+        inner_matcher_.DescribeTo(os);
+    }
+
+    void DescribeNegationTo(std::ostream* os) const {
+        *os << "does not contain error value, or contains error value which ";
+        inner_matcher_.DescribeNegationTo(os);
+    }
+
+  private:
+    ::testing::Matcher<const ErrorT&> inner_matcher_;
+};
+
+template <typename InnerMatcher>
+class IsErrorAnd {
+  public:
+    explicit IsErrorAnd(InnerMatcher innerMatcher) : inner_matcher_(std::move(innerMatcher)) {}
+
+    template <typename T>
+    operator ::testing::Matcher<T>() const {
+        return ::testing::Matcher<T>{new IsErrorAndImpl<const T&>(inner_matcher_)};
+    }
+
+  private:
+    InnerMatcher inner_matcher_;
+};
+
+}  // namespace android::error
diff --git a/media/libheif/OWNERS b/media/libheif/OWNERS
new file mode 100644
index 0000000..a61ad21
--- /dev/null
+++ b/media/libheif/OWNERS
@@ -0,0 +1,2 @@
+include platform/frameworks/av:/media/janitors/avic_OWNERS
+include platform/frameworks/av:/media/janitors/codec_OWNERS
\ No newline at end of file
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 840897f..8a962c6 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -81,9 +81,6 @@
 cc_library_shared {
     name: "libmedia_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
@@ -258,6 +255,45 @@
 }
 
 cc_library_shared {
+    name: "libmedia_codeclist_capabilities",
+
+    srcs: [
+        "AudioCapabilities.cpp",
+        "CodecCapabilities.cpp",
+        "CodecCapabilitiesUtils.cpp",
+    ],
+
+    local_include_dirs: [
+        "include",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    export_include_dirs: [
+        "include",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wno-error=deprecated-declarations",
+        "-Wall",
+    ],
+
+    sanitize: {
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+        cfi: true,
+    },
+}
+
+cc_library_shared {
     name: "libmedia_codeclist",
 
     srcs: [
@@ -273,6 +309,7 @@
         "android.hardware.media.omx@1.0",
         "libbinder",
         "liblog",
+        "libmedia_codeclist_capabilities",
         "libstagefright_foundation",
         "libutils",
     ],
@@ -281,10 +318,6 @@
         "android.media.codec-aconfig-cc",
     ],
 
-    include_dirs: [
-        "system/libhidl/transport/token/1.0/utils/include",
-    ],
-
     export_include_dirs: [
         "include",
     ],
@@ -367,7 +400,6 @@
         "av-types-aidl-cpp",
         "liblog",
         "libcutils",
-        "libprocessgroup",
         "libutils",
         "libbinder",
         "libbinder_ndk",
diff --git a/media/libmedia/AudioCapabilities.cpp b/media/libmedia/AudioCapabilities.cpp
new file mode 100644
index 0000000..e8cf517
--- /dev/null
+++ b/media/libmedia/AudioCapabilities.cpp
@@ -0,0 +1,394 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "AudioCapabilities"
+
+#include <android-base/strings.h>
+#include <android-base/properties.h>
+
+#include <media/AudioCapabilities.h>
+#include <media/CodecCapabilities.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+namespace android {
+
+const Range<int>& AudioCapabilities::getBitrateRange() const {
+    return mBitrateRange;
+}
+
+const std::vector<int>& AudioCapabilities::getSupportedSampleRates() const {
+    return mSampleRates;
+}
+
+const std::vector<Range<int>>&
+        AudioCapabilities::getSupportedSampleRateRanges() const {
+    return mSampleRateRanges;
+}
+
+int AudioCapabilities::getMaxInputChannelCount() const {
+    int overallMax = 0;
+    for (int i = mInputChannelRanges.size() - 1; i >= 0; i--) {
+        int lmax = mInputChannelRanges[i].upper();
+        if (lmax > overallMax) {
+            overallMax = lmax;
+        }
+    }
+    return overallMax;
+}
+
+int AudioCapabilities::getMinInputChannelCount() const {
+    int overallMin = MAX_INPUT_CHANNEL_COUNT;
+    for (int i = mInputChannelRanges.size() - 1; i >= 0; i--) {
+        int lmin = mInputChannelRanges[i].lower();
+        if (lmin < overallMin) {
+            overallMin = lmin;
+        }
+    }
+    return overallMin;
+}
+
+const std::vector<Range<int>>&
+        AudioCapabilities::getInputChannelCountRanges() const {
+    return mInputChannelRanges;
+}
+
+// static
+std::shared_ptr<AudioCapabilities> AudioCapabilities::Create(std::string mediaType,
+        std::vector<ProfileLevel> profLevs, const sp<AMessage> &format) {
+    std::shared_ptr<AudioCapabilities> caps(new AudioCapabilities());
+    caps->init(mediaType, profLevs, format);
+    return caps;
+}
+
+void AudioCapabilities::init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+        const sp<AMessage> &format) {
+    mMediaType = mediaType;
+    mProfileLevels = profLevs;
+    mError = 0;
+
+    initWithPlatformLimits();
+    applyLevelLimits();
+    parseFromInfo(format);
+}
+
+void AudioCapabilities::initWithPlatformLimits() {
+    mBitrateRange = Range<int>(0, INT_MAX);
+    mInputChannelRanges.push_back(Range<int>(1, MAX_INPUT_CHANNEL_COUNT));
+
+    const int minSampleRate = base::GetIntProperty("ro.mediacodec.min_sample_rate", 7350);
+    const int maxSampleRate = base::GetIntProperty("ro.mediacodec.max_sample_rate", 192000);
+    mSampleRateRanges.push_back(Range<int>(minSampleRate, maxSampleRate));
+}
+
+bool AudioCapabilities::supports(int sampleRate, int inputChannels) {
+    // channels and sample rates are checked orthogonally
+    if (inputChannels != 0
+            && !std::any_of(mInputChannelRanges.begin(), mInputChannelRanges.end(),
+            [inputChannels](const Range<int> &a) { return a.contains(inputChannels); })) {
+        return false;
+    }
+    if (sampleRate != 0
+            && !std::any_of(mSampleRateRanges.begin(), mSampleRateRanges.end(),
+            [sampleRate](const Range<int> &a) { return a.contains(sampleRate); })) {
+        return false;
+    }
+    return true;
+}
+
+bool AudioCapabilities::isSampleRateSupported(int sampleRate) {
+    return supports(sampleRate, 0);
+}
+
+void AudioCapabilities::limitSampleRates(std::vector<int> rates) {
+    std::vector<Range<int>> sampleRateRanges;
+    std::sort(rates.begin(), rates.end());
+    for (int rate : rates) {
+        if (supports(rate, 0 /* channels */)) {
+            sampleRateRanges.push_back(Range<int>(rate, rate));
+        }
+    }
+    mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, sampleRateRanges);
+    createDiscreteSampleRates();
+}
+
+void AudioCapabilities::createDiscreteSampleRates() {
+    mSampleRates.clear();
+    for (int i = 0; i < mSampleRateRanges.size(); i++) {
+        mSampleRates.push_back(mSampleRateRanges[i].lower());
+    }
+}
+
+void AudioCapabilities::limitSampleRates(std::vector<Range<int>> rateRanges) {
+    sortDistinctRanges(&rateRanges);
+    mSampleRateRanges = intersectSortedDistinctRanges(mSampleRateRanges, rateRanges);
+    // check if all values are discrete
+    for (Range<int> range: mSampleRateRanges) {
+        if (range.lower() != range.upper()) {
+            mSampleRates.clear();
+            return;
+        }
+    }
+    createDiscreteSampleRates();
+}
+
+void AudioCapabilities::applyLevelLimits() {
+    std::vector<int> sampleRates;
+    std::optional<Range<int>> sampleRateRange;
+    std::optional<Range<int>> bitRates;
+    int maxChannels = MAX_INPUT_CHANNEL_COUNT;
+
+    // const char *mediaType = mMediaType.c_str();
+    if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_MPEG)) {
+        sampleRates = {
+                8000, 11025, 12000,
+                16000, 22050, 24000,
+                32000, 44100, 48000 };
+        bitRates = Range<int>(8000, 320000);
+        maxChannels = 2;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_NB)) {
+        sampleRates = { 8000 };
+        bitRates = Range<int>(4750, 12200);
+        maxChannels = 1;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AMR_WB)) {
+        sampleRates = { 16000 };
+        bitRates = Range<int>(6600, 23850);
+        maxChannels = 1;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AAC)) {
+        sampleRates = {
+                7350, 8000,
+                11025, 12000, 16000,
+                22050, 24000, 32000,
+                44100, 48000, 64000,
+                88200, 96000 };
+        bitRates = Range<int>(8000, 510000);
+        maxChannels = 48;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_VORBIS)) {
+        bitRates = Range<int>(32000, 500000);
+        sampleRateRange = Range<int>(8000, 192000);
+        maxChannels = 255;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_OPUS)) {
+        bitRates = Range<int>(6000, 510000);
+        sampleRates = { 8000, 12000, 16000, 24000, 48000 };
+        maxChannels = 255;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_RAW)) {
+        sampleRateRange = Range<int>(1, 192000);
+        bitRates = Range<int>(1, 10000000);
+        maxChannels = MAX_NUM_CHANNELS;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_FLAC)) {
+        sampleRateRange = Range<int>(1, 655350);
+        // lossless codec, so bitrate is ignored
+        maxChannels = 255;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_ALAW)
+            || base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_G711_MLAW)) {
+        sampleRates = { 8000 };
+        bitRates = Range<int>(64000, 64000);
+        // platform allows multiple channels for this format
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_MSGSM)) {
+        sampleRates = { 8000 };
+        bitRates = Range<int>(13000, 13000);
+        maxChannels = 1;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AC3)) {
+        maxChannels = 6;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_EAC3)) {
+        maxChannels = 16;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_EAC3_JOC)) {
+        sampleRates = { 48000 };
+        bitRates = Range<int>(32000, 6144000);
+        maxChannels = 16;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_AC4)) {
+        sampleRates = { 44100, 48000, 96000, 192000 };
+        bitRates = Range<int>(16000, 2688000);
+        maxChannels = 24;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS)) {
+        sampleRates = { 44100, 48000 };
+        bitRates = Range<int>(96000, 1524000);
+        maxChannels = 6;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_HD)) {
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            switch (profileLevel.mProfile) {
+                case DTS_HDProfileLBR:
+                    sampleRates = { 22050, 24000, 44100, 48000 };
+                    bitRates = Range<int>(32000, 768000);
+                    break;
+                case DTS_HDProfileHRA:
+                case DTS_HDProfileMA:
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int>(96000, 24500000);
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile,
+                            mMediaType.c_str());
+                    mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int>(96000, 24500000);
+            }
+        }
+        maxChannels = 8;
+    } else if (base::EqualsIgnoreCase(mMediaType, MIMETYPE_AUDIO_DTS_UHD)) {
+        for (ProfileLevel profileLevel: mProfileLevels) {
+            switch (profileLevel.mProfile) {
+                case DTS_UHDProfileP2:
+                    sampleRates = { 48000 };
+                    bitRates = Range<int>(96000, 768000);
+                    maxChannels = 10;
+                    break;
+                case DTS_UHDProfileP1:
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int>(96000, 24500000);
+                    maxChannels = 32;
+                    break;
+                default:
+                    ALOGW("Unrecognized profile %d for %s", profileLevel.mProfile,
+                            mMediaType.c_str());
+                    mError |= ERROR_CAPABILITIES_UNRECOGNIZED;
+                    sampleRates = { 44100, 48000, 88200, 96000, 176400, 192000 };
+                    bitRates = Range<int>(96000, 24500000);
+                    maxChannels = 32;
+            }
+        }
+    } else {
+        ALOGW("Unsupported mediaType %s", mMediaType.c_str());
+        mError |= ERROR_CAPABILITIES_UNSUPPORTED;
+    }
+
+    // restrict ranges
+    if (!sampleRates.empty()) {
+        limitSampleRates(sampleRates);
+    } else if (sampleRateRange) {
+        std::vector<Range<int>> rateRanges = { sampleRateRange.value() };
+        limitSampleRates(rateRanges);
+    }
+
+    Range<int> channelRange = Range<int>(1, maxChannels);
+    std::vector<Range<int>> inputChannels = { channelRange };
+    applyLimits(inputChannels, bitRates);
+}
+
+void AudioCapabilities::applyLimits(
+        const std::vector<Range<int>> &inputChannels,
+        const std::optional<Range<int>> &bitRates) {
+    // clamp & make a local copy
+    std::vector<Range<int>> inputChannelsCopy(inputChannels.size());
+    for (int i = 0; i < inputChannels.size(); i++) {
+        int lower = inputChannels[i].clamp(1);
+        int upper = inputChannels[i].clamp(MAX_INPUT_CHANNEL_COUNT);
+        inputChannelsCopy[i] = Range<int>(lower, upper);
+    }
+
+    // sort, intersect with existing, & save channel list
+    sortDistinctRanges(&inputChannelsCopy);
+    mInputChannelRanges = intersectSortedDistinctRanges(inputChannelsCopy, mInputChannelRanges);
+
+    if (bitRates) {
+        mBitrateRange = mBitrateRange.intersect(bitRates.value());
+    }
+}
+
+void AudioCapabilities::parseFromInfo(const sp<AMessage> &format) {
+    int maxInputChannels = MAX_INPUT_CHANNEL_COUNT;
+    std::vector<Range<int>> channels = { Range<int>(1, maxInputChannels) };
+    std::optional<Range<int>> bitRates = POSITIVE_INTEGERS;
+
+    AString rateAString;
+    if (format->findString("sample-rate-ranges", &rateAString)) {
+        std::vector<std::string> rateStrings = base::Split(std::string(rateAString.c_str()), ",");
+        std::vector<Range<int>> rateRanges;
+        for (std::string rateString : rateStrings) {
+            std::optional<Range<int>> rateRange = ParseIntRange(rateString);
+            if (!rateRange) {
+                continue;
+            }
+            rateRanges.push_back(rateRange.value());
+        }
+        limitSampleRates(rateRanges);
+    }
+
+    // we will prefer channel-ranges over max-channel-count
+    AString valueStr;
+    if (format->findString("channel-ranges", &valueStr)) {
+        std::vector<std::string> channelStrings = base::Split(std::string(valueStr.c_str()), ",");
+        std::vector<Range<int>> channelRanges;
+        for (std::string channelString : channelStrings) {
+            std::optional<Range<int>> channelRange = ParseIntRange(channelString);
+            if (!channelRange) {
+                continue;
+            }
+            channelRanges.push_back(channelRange.value());
+        }
+        channels = channelRanges;
+    } else if (format->findString("channel-range", &valueStr)) {
+        std::optional<Range<int>> oneRange = ParseIntRange(std::string(valueStr.c_str()));
+        if (oneRange) {
+            channels = { oneRange.value() };
+        }
+    } else if (format->findString("max-channel-count", &valueStr)) {
+        maxInputChannels = std::atoi(valueStr.c_str());
+        if (maxInputChannels == 0) {
+            channels = { Range<int>(0, 0) };
+        } else {
+            channels = { Range<int>(1, maxInputChannels) };
+        }
+    } else if ((mError & ERROR_CAPABILITIES_UNSUPPORTED) != 0) {
+        maxInputChannels = 0;
+        channels = { Range<int>(0, 0) };
+    }
+
+    if (format->findString("bitrate-range", &valueStr)) {
+        std::optional<Range<int>> parsedBitrate = ParseIntRange(valueStr.c_str());
+        if (parsedBitrate) {
+            bitRates = bitRates.value().intersect(parsedBitrate.value());
+        }
+    }
+
+    applyLimits(channels, bitRates);
+}
+
+void AudioCapabilities::getDefaultFormat(sp<AMessage> &format) {
+    // report settings that have only a single choice
+    if (mBitrateRange.lower() == mBitrateRange.upper()) {
+        format->setInt32(KEY_BIT_RATE, mBitrateRange.lower());
+    }
+    if (getMaxInputChannelCount() == 1) {
+        // mono-only format
+        format->setInt32(KEY_CHANNEL_COUNT, 1);
+    }
+    if (!mSampleRates.empty() && mSampleRates.size() == 1) {
+        format->setInt32(KEY_SAMPLE_RATE, mSampleRates[0]);
+    }
+}
+
+bool AudioCapabilities::supportsFormat(const sp<AMessage> &format) {
+    // Default to 0 ("unspecified") so a missing key is not read uninitialized.
+    int32_t sampleRate = 0;
+    format->findInt32(KEY_SAMPLE_RATE, &sampleRate);
+    int32_t channels = 0;
+    format->findInt32(KEY_CHANNEL_COUNT, &channels);
+
+    if (!supports(sampleRate, channels)) {
+        return false;
+    }
+
+    if (!CodecCapabilities::SupportsBitrate(mBitrateRange, format)) {
+        return false;
+    }
+
+    // nothing to do for:
+    // KEY_CHANNEL_MASK: codecs don't get this
+    // KEY_IS_ADTS:      required feature for all AAC decoders
+    return true;
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/CodecCapabilities.cpp b/media/libmedia/CodecCapabilities.cpp
new file mode 100644
index 0000000..5bed1c4
--- /dev/null
+++ b/media/libmedia/CodecCapabilities.cpp
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilities"
+
+#include <utils/Log.h>
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+
+bool CodecCapabilities::SupportsBitrate(Range<int> bitrateRange,
+        const sp<AMessage> &format) {
+    // consider max bitrate over average bitrate for support
+    int32_t maxBitrate = 0;
+    format->findInt32(KEY_MAX_BIT_RATE, &maxBitrate);
+    int32_t bitrate = 0;
+    format->findInt32(KEY_BIT_RATE, &bitrate);
+
+    if (bitrate == 0) {
+        bitrate = maxBitrate;
+    } else if (maxBitrate != 0) {
+        bitrate = std::max(bitrate, maxBitrate);
+    }
+
+    if (bitrate > 0) {
+        return bitrateRange.contains(bitrate);
+    }
+
+    return true;
+}
+
+const std::string& CodecCapabilities::getMediaType() {
+    return mMediaType;
+}
+
+const std::vector<ProfileLevel>& CodecCapabilities::getProfileLevels() {
+    return mProfileLevels;
+}
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/CodecCapabilitiesUtils.cpp b/media/libmedia/CodecCapabilitiesUtils.cpp
new file mode 100644
index 0000000..edfc9be
--- /dev/null
+++ b/media/libmedia/CodecCapabilitiesUtils.cpp
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilitiesUtils"
+#include <utils/Log.h>
+
+#include <algorithm>
+#include <cmath>
+#include <regex>
+#include <string>
+#include <vector>
+
+#include <media/CodecCapabilitiesUtils.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
+std::optional<Range<int>> ParseIntRange(const std::string &str) {
+    if (str.empty()) {
+        ALOGW("could not parse empty integer range");
+        return std::nullopt;
+    }
+    int lower, upper;
+    std::regex regex("([0-9]+)-([0-9]+)");
+    std::smatch match;
+    if (std::regex_match(str, match, regex)) {
+        lower = std::atoi(match[1].str().c_str());
+        upper = std::atoi(match[2].str().c_str());
+    } else if (std::atoi(str.c_str()) != 0) {
+        lower = upper = std::atoi(str.c_str());
+    } else {
+        ALOGW("could not parse integer range: %s", str.c_str());
+        return std::nullopt;
+    }
+    return std::make_optional<Range<int>>(lower, upper);
+}
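+
+// Illustrative behavior (not part of this change): "8000-48000" yields
+// Range<int>(8000, 48000) and "44100" yields Range<int>(44100, 44100); an empty or
+// malformed string yields std::nullopt, as does the literal "0", which atoi cannot
+// distinguish from a parse failure.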
+
+}  // namespace android
\ No newline at end of file
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index cdb1837..ef6250f 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -23,7 +23,6 @@
 #include <binder/Parcel.h>
 #include <media/IMediaHTTPService.h>
 #include <media/IMediaMetadataRetriever.h>
-#include <processgroup/sched_policy.h>
 #include <utils/String8.h>
 #include <utils/KeyedVector.h>
 
diff --git a/media/libmedia/MediaCodecInfo.cpp b/media/libmedia/MediaCodecInfo.cpp
index c45c5c3..d5d1a09 100644
--- a/media/libmedia/MediaCodecInfo.cpp
+++ b/media/libmedia/MediaCodecInfo.cpp
@@ -18,8 +18,6 @@
 #define LOG_TAG "MediaCodecInfo"
 #include <utils/Log.h>
 
-#include <media/IOMX.h>
-
 #include <media/MediaCodecInfo.h>
 
 #include <media/stagefright/foundation/ADebug.h>
diff --git a/media/libmedia/include/media/AudioCapabilities.h b/media/libmedia/include/media/AudioCapabilities.h
new file mode 100644
index 0000000..2bc3335
--- /dev/null
+++ b/media/libmedia/include/media/AudioCapabilities.h
@@ -0,0 +1,138 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef AUDIO_CAPABILITIES_H_
+
+#define AUDIO_CAPABILITIES_H_
+
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+#include <system/audio.h>
+
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct AudioCapabilities {
+    /**
+     * Create AudioCapabilities.
+     */
+    static std::shared_ptr<AudioCapabilities> Create(std::string mediaType,
+            std::vector<ProfileLevel> profLevs, const sp<AMessage> &format);
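+    //
+    // Illustrative usage (not part of this change; profLevs and format are assumed
+    // to come from the codec's reported capabilities):
+    //   auto caps = AudioCapabilities::Create(MIMETYPE_AUDIO_AAC, profLevs, format);
+    //   if (caps->isSampleRateSupported(44100)) { /* supported */ }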
+
+    /**
+     * Returns the range of supported bitrates in bits/second.
+     */
+    const Range<int>& getBitrateRange() const;
+
+    /**
+     * Returns the array of supported sample rates if the codec
+     * supports only discrete values. Otherwise, it returns an empty array.
+     * The array is sorted in ascending order.
+     */
+    const std::vector<int>& getSupportedSampleRates() const;
+
+    /**
+     * Returns the array of supported sample rate ranges.  The
+     * array is sorted in ascending order, and the ranges are
+     * distinct.
+     */
+    const std::vector<Range<int>>& getSupportedSampleRateRanges() const;
+
+    /**
+     * Returns the maximum number of input channels supported.
+     * The returned value should be between 1 and 255.
+     *
+     * Through {@link android.os.Build.VERSION_CODES#R}, this method indicated support
+     * for any number of input channels between 1 and this maximum value.
+     *
+     * As of {@link android.os.Build.VERSION_CODES#S},
+     * the implied lower limit of 1 channel is no longer valid.
+     * As of {@link android.os.Build.VERSION_CODES#S}, {@link #getMaxInputChannelCount} is
+     * superseded by {@link #getInputChannelCountRanges},
+     * which returns an array of ranges of channels.
+     * The {@link #getMaxInputChannelCount} method will return the highest value
+     * in the ranges returned by {@link #getInputChannelCountRanges}
+     */
+    int getMaxInputChannelCount() const;
+
+    /**
+     * Returns the minimum number of input channels supported.
+     * This is often 1, but does vary for certain mime types.
+     *
+     * This returns the lowest channel count in the ranges returned by
+     * {@link #getInputChannelCountRanges}.
+     */
+    int getMinInputChannelCount() const;
+
+    /**
+     * Returns an array of ranges representing the number of input channels supported.
+     * The codec supports any number of input channels within this range.
+     *
+     * This supersedes the {@link #getMaxInputChannelCount} method.
+     *
+     * For many codecs, this will be a single range [1..N], for some N.
+     *
+     * The returned array cannot be empty.
+     */
+    const std::vector<Range<int>>& getInputChannelCountRanges() const;
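+    // (Illustrative: a typical decoder might report a single range such as [1, 8],
+    // while a codec with discrete layouts could report [1, 2], [6, 6] and [8, 8].)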
+
+    /**
+     * Query whether the sample rate is supported by the codec.
+     */
+    bool isSampleRateSupported(int sampleRate);
+
+    /* For internal use only. Not exposed as a public API */
+    void getDefaultFormat(sp<AMessage> &format);
+
+    /* For internal use only. Not exposed as a public API */
+    bool supportsFormat(const sp<AMessage> &format);
+
+private:
+    static constexpr int MAX_INPUT_CHANNEL_COUNT = 30;
+    static constexpr uint32_t MAX_NUM_CHANNELS = FCC_LIMIT;
+
+    int mError;
+    std::string mMediaType;
+    std::vector<ProfileLevel> mProfileLevels;
+
+    Range<int> mBitrateRange;
+
+    std::vector<int> mSampleRates;
+    std::vector<Range<int>> mSampleRateRanges;
+    std::vector<Range<int>> mInputChannelRanges;
+
+    /* no public constructor */
+    AudioCapabilities() {}
+    void init(std::string mediaType, std::vector<ProfileLevel> profLevs,
+            const sp<AMessage> &format);
+    void initWithPlatformLimits();
+    bool supports(int sampleRate, int inputChannels);
+    void limitSampleRates(std::vector<int> rates);
+    void createDiscreteSampleRates();
+    void limitSampleRates(std::vector<Range<int>> rateRanges);
+    void applyLevelLimits();
+    void applyLimits(const std::vector<Range<int>> &inputChannels,
+            const std::optional<Range<int>> &bitRates);
+    void parseFromInfo(const sp<AMessage> &format);
+
+    friend struct CodecCapabilities;
+};
+
+}  // namespace android
+
+#endif // AUDIO_CAPABILITIES_H_
\ No newline at end of file
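Note: a minimal sketch of how a caller might exercise this new header, mirroring the CodecCapabilitiesTest added later in this change. The detail keys ("bitrate-range", "max-channel-count", "sample-rate-ranges") and the AAC media type come from that test; the wrapper function name is illustrative only.

```cpp
#include <memory>
#include <vector>

#include <media/AudioCapabilities.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

// Illustrative only: builds the codec "details" message by hand the way the
// unit test does; real callers would obtain capabilities via MediaCodecList.
static void queryAacCapsSketch() {
    sp<AMessage> details = new AMessage;
    details->setString("bitrate-range", "8000-960000");
    details->setString("max-channel-count", "8");
    details->setString("sample-rate-ranges", "8000,16000,44100,48000");

    std::vector<ProfileLevel> profileLevels;  // empty is acceptable for this sketch
    std::shared_ptr<AudioCapabilities> caps =
            AudioCapabilities::Create("audio/mp4a-latm" /* AAC */, profileLevels, details);

    const Range<int>& bitrates = caps->getBitrateRange();
    bool supports44k = caps->isSampleRateSupported(44100);
    int maxChannels = caps->getMaxInputChannelCount();
    (void)bitrates; (void)supports44k; (void)maxChannels;
}
```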
diff --git a/media/libmedia/include/media/CodecCapabilities.h b/media/libmedia/include/media/CodecCapabilities.h
new file mode 100644
index 0000000..9d1c4ea
--- /dev/null
+++ b/media/libmedia/include/media/CodecCapabilities.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2024, The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_CAPABILITIES_H_
+
+#define CODEC_CAPABILITIES_H_
+
+#include <media/AudioCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/AString.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+#include <utils/Errors.h>
+#include <utils/RefBase.h>
+#include <utils/Vector.h>
+#include <utils/StrongPointer.h>
+
+namespace android {
+
+struct CodecCapabilities {
+
+    static bool SupportsBitrate(Range<int> bitrateRange,
+            const sp<AMessage> &format);
+
+    /**
+     * Returns the media type for which this codec-capability object was created.
+     */
+    const std::string& getMediaType();
+
+    /**
+     * Returns the supported profile levels.
+     */
+    const std::vector<ProfileLevel>& getProfileLevels();
+
+private:
+    std::string mMediaType;
+    std::vector<ProfileLevel> mProfileLevels;
+
+    std::shared_ptr<AudioCapabilities> mAudioCaps;
+};
+
+}  // namespace android
+
+#endif // CODEC_CAPABILITIES_H_
\ No newline at end of file
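A hedged usage note for the static helper declared above: the exact format keys that SupportsBitrate consults are not visible in this header, so the "bitrate" entry below is only an assumption based on common MediaCodec format conventions, not something this change confirms.

```cpp
#include <media/CodecCapabilities.h>
#include <media/stagefright/foundation/AMessage.h>

using namespace android;

static bool fitsSketch() {
    sp<AMessage> format = new AMessage;
    format->setInt32("bitrate", 128000);  // assumed key; not confirmed by this header
    return CodecCapabilities::SupportsBitrate(Range<int>(8000, 960000), format);
}
```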
diff --git a/media/libmedia/include/media/CodecCapabilitiesUtils.h b/media/libmedia/include/media/CodecCapabilitiesUtils.h
new file mode 100644
index 0000000..2bf822a
--- /dev/null
+++ b/media/libmedia/include/media/CodecCapabilitiesUtils.h
@@ -0,0 +1,184 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef CODEC_CAPABILITIES__UTILS_H_
+
+#define CODEC_CAPABILITIES__UTILS_H_
+
+#include <algorithm>
+#include <cmath>
+#include <optional>
+#include <string>
+#include <vector>
+
+#include <utils/Log.h>
+
+#include <media/stagefright/foundation/AUtils.h>
+
+namespace android {
+
+struct ProfileLevel {
+    uint32_t mProfile;
+    uint32_t mLevel;
+    bool operator <(const ProfileLevel &o) const {
+        return mProfile < o.mProfile || (mProfile == o.mProfile && mLevel < o.mLevel);
+    }
+};
+
+/**
+ * Immutable class for describing the range of two numeric values.
+ *
+ * To make it immutable, all data are private and all functions are const.
+ *
+ * From frameworks/base/core/java/android/util/Range.java
+ */
+template<typename T>
+struct Range {
+    Range() : lower_(), upper_() {}
+
+    Range(T l, T u) : lower_(l), upper_(u) {}
+
+    constexpr bool empty() const { return lower_ > upper_; }
+
+    T lower() const { return lower_; }
+
+    T upper() const { return upper_; }
+
+    // Check if a value is in the range.
+    bool contains(T value) const {
+        return lower_ <= value && upper_ >= value;
+    }
+
+    bool contains(Range<T> range) const {
+        return (range.lower_ >= lower_) && (range.upper_ <= upper_);
+    }
+
+    // Clamp a value in the range
+    T clamp(T value) const {
+        if (value < lower_) {
+            return lower_;
+        } else if (value > upper_) {
+            return upper_;
+        } else {
+            return value;
+        }
+    }
+
+    // Return the intersected range
+    Range<T> intersect(Range<T> range) const {
+        if (lower_ >= range.lower() && range.upper() >= upper_) {
+            // range includes this
+            return *this;
+        } else if (range.lower() >= lower_ && range.upper() <= upper_) {
+            // this includes range
+            return range;
+        } else {
+            // if ranges are disjoint returns an empty Range(lower > upper)
+            Range<T> result = Range<T>(std::max(lower_, range.lower_),
+                    std::min(upper_, range.upper_));
+            if (result.empty()) {
+                ALOGE("Failed to intersect 2 ranges as they are disjoint");
+            }
+            return result;
+        }
+    }
+
+    /**
+     * Returns the intersection of this range and the inclusive range
+     * specified by {@code [lower, upper]}.
+     * <p>
+     * See {@link #intersect(Range)} for more details.</p>
+     *
+     * @param lower a non-{@code null} {@code T} reference
+     * @param upper a non-{@code null} {@code T} reference
+     * @return the intersection of this range and the other range
+     *
+     * @throws NullPointerException if {@code lower} or {@code upper} was {@code null}
+     * @throws IllegalArgumentException if the ranges are disjoint.
+     */
+    Range<T> intersect(T lower, T upper) {
+        return Range(std::max(lower_, lower), std::min(upper_, upper));
+    }
+
+private:
+    T lower_;
+    T upper_;
+};
+
+static const Range<int> POSITIVE_INTEGERS = Range<int>(1, INT_MAX);
+
+// found stuff that is not supported by framework (=> this should not happen)
+constexpr int ERROR_CAPABILITIES_UNRECOGNIZED   = (1 << 0);
+// found profile/level for which we don't have capability estimates
+constexpr int ERROR_CAPABILITIES_UNSUPPORTED    = (1 << 1);
+// have not found any profile/level for which we don't have capability estimate
+// constexpr int ERROR_NONE_SUPPORTED = (1 << 2);
+
+/**
+ * Sorts distinct (non-intersecting) range array in ascending order.
+ * From frameworks/base/media/java/android/media/Utils.java
+ */
+template<typename T>
+void sortDistinctRanges(std::vector<Range<T>> *ranges) {
+    std::sort(ranges->begin(), ranges->end(),
+            [](Range<T> r1, Range<T> r2) {
+        if (r1.upper() < r2.lower()) {
+            return true;
+        } else if (r1.lower() > r2.upper()) {
+            return false;
+        } else {
+            ALOGE("sample rate ranges must be distinct.");
+            return false;
+        }
+    });
+}
+
+/**
+ * Returns the intersection of two sets of non-intersecting ranges
+ * From frameworks/base/media/java/android/media/Utils.java
+ * @param one a sorted set of non-intersecting ranges in ascending order
+ * @param another another sorted set of non-intersecting ranges in ascending order
+ * @return the intersection of the two sets, sorted in ascending order
+ */
+template<typename T>
+std::vector<Range<T>> intersectSortedDistinctRanges(
+        const std::vector<Range<T>> &one, const std::vector<Range<T>> &another) {
+    std::vector<Range<T>> result;
+    int ix = 0;
+    for (Range<T> range : another) {
+        while (ix < one.size() && one[ix].upper() < range.lower()) {
+            ++ix;
+        }
+        while (ix < one.size() && one[ix].upper() < range.upper()) {
+            result.push_back(range.intersect(one[ix]));
+            ++ix;
+        }
+        if (ix == one.size()) {
+            break;
+        }
+        if (one[ix].lower() <= range.upper()) {
+            result.push_back(range.intersect(one[ix]));
+        }
+    }
+    return result;
+}
+
+// parse string into int range
+std::optional<Range<int>> ParseIntRange(const std::string &str);
+
+}  // namespace android
+
+#endif  // CODEC_CAPABILITIES__UTILS_H_
\ No newline at end of file
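Since CodecCapabilitiesUtils.h is header-only apart from ParseIntRange (declared but defined elsewhere), a small self-contained sketch of the Range helpers and the sorted-range intersection may help; every name used below comes from the header itself, and the expected results are worked out from the code above.

```cpp
#include <vector>

#include <media/CodecCapabilitiesUtils.h>

using namespace android;

static void rangeUtilsSketch() {
    Range<int> r(8000, 48000);
    bool ok = r.contains(44100);                                // true
    int clamped = r.clamp(96000);                               // 48000
    Range<int> common = r.intersect(Range<int>(16000, 96000));  // [16000, 48000]

    std::vector<Range<int>> hw = {{8000, 48000}, {96000, 192000}};
    std::vector<Range<int>> sw = {{16000, 96000}};
    sortDistinctRanges(&hw);
    sortDistinctRanges(&sw);
    // Keeps only what both sets support: [16000, 48000] and [96000, 96000].
    std::vector<Range<int>> both = intersectSortedDistinctRanges(hw, sw);
    (void)ok; (void)clamped; (void)common; (void)both;
}
```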
diff --git a/media/libmedia/include/media/MediaCodecInfo.h b/media/libmedia/include/media/MediaCodecInfo.h
index 88a2dc4..72aca98 100644
--- a/media/libmedia/include/media/MediaCodecInfo.h
+++ b/media/libmedia/include/media/MediaCodecInfo.h
@@ -20,6 +20,8 @@
 
 #include <android-base/macros.h>
 #include <binder/Parcel.h>
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
 #include <media/stagefright/foundation/ABase.h>
 #include <media/stagefright/foundation/AString.h>
 
@@ -43,13 +45,10 @@
 struct MediaCodecListWriter;
 
 struct MediaCodecInfo : public RefBase {
-    struct ProfileLevel {
-        uint32_t mProfile;
-        uint32_t mLevel;
-        bool operator <(const ProfileLevel &o) const {
-            return mProfile < o.mProfile || (mProfile == o.mProfile && mLevel < o.mLevel);
-        }
-    };
+
+    // Moved to CodecCapabilitiesUtils.h
+    // Map MediaCodecInfo::ProfileLevel to android::ProfileLevel to maintain compatibility.
+    typedef ::android::ProfileLevel ProfileLevel;
 
     struct CapabilitiesWriter;
 
diff --git a/media/libmedia/include/media/RingBuffer.h b/media/libmedia/include/media/RingBuffer.h
index 4d92d87..a08f35e 100644
--- a/media/libmedia/include/media/RingBuffer.h
+++ b/media/libmedia/include/media/RingBuffer.h
@@ -44,8 +44,14 @@
     /**
      * Forward iterator to this class.  Implements an std:forward_iterator.
      */
-    class iterator : public std::iterator<std::forward_iterator_tag, T> {
+    class iterator {
     public:
+        using iterator_category = std::forward_iterator_tag;
+        using value_type = T;
+        using difference_type = std::ptrdiff_t;
+        using pointer = T*;
+        using reference = T&;
+
         iterator(T* ptr, size_t size, size_t pos, size_t ctr);
 
         iterator& operator++();
@@ -357,5 +363,3 @@
 }; // namespace android
 
 #endif // ANDROID_SERVICE_UTILS_RING_BUFFER_H
-
-
diff --git a/media/libmedia/tests/codeccapabilities/Android.bp b/media/libmedia/tests/codeccapabilities/Android.bp
new file mode 100644
index 0000000..79eb71a
--- /dev/null
+++ b/media/libmedia/tests/codeccapabilities/Android.bp
@@ -0,0 +1,36 @@
+cc_test {
+    name: "CodecCapabilitiesTest",
+    team: "trendy_team_media_codec_framework",
+
+    test_suites: [
+        "general-tests",
+    ],
+    gtest: true,
+
+    srcs: [
+        "CodecCapabilitiesTest.cpp",
+    ],
+
+    shared_libs: [
+        "libbinder",
+        "liblog",
+        "libmedia_codeclist", // available >= R
+        "libmedia_codeclist_capabilities",
+        "libstagefright",
+        "libstagefright_foundation",
+        "libutils",
+    ],
+
+    cflags: [
+        "-Werror",
+        "-Wall",
+    ],
+
+    sanitize: {
+        cfi: true,
+        misc_undefined: [
+            "unsigned-integer-overflow",
+            "signed-integer-overflow",
+        ],
+    },
+}
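As a usage note, once this module is built it should be runnable by name from a platform checkout, for example with `atest CodecCapabilitiesTest`, since it is registered under general-tests above.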
diff --git a/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
new file mode 100644
index 0000000..89c9739
--- /dev/null
+++ b/media/libmedia/tests/codeccapabilities/CodecCapabilitiesTest.cpp
@@ -0,0 +1,150 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "CodecCapabilitiesTest"
+
+#include <utils/Log.h>
+
+#include <memory>
+
+#include <gtest/gtest.h>
+
+#include <binder/Parcel.h>
+
+#include <media/CodecCapabilities.h>
+#include <media/CodecCapabilitiesUtils.h>
+#include <media/MediaCodecInfo.h>
+
+#include <media/stagefright/MediaCodecConstants.h>
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AString.h>
+
+using namespace android;
+
+class AudioCapsAacTest : public testing::Test {
+protected:
+    AudioCapsAacTest() {
+        std::string mediaType = MIMETYPE_AUDIO_AAC;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("bitrate-range", "8000-960000");
+        details->setString("max-channel-count", "8");
+        details->setString("sample-rate-ranges",
+                "7350,8000,11025,12000,16000,22050,24000,32000,44100,48000");
+
+        std::vector<ProfileLevel> profileLevel{
+            ProfileLevel(2, 0),
+            ProfileLevel(5, 0),
+            ProfileLevel(29, 0),
+            ProfileLevel(23, 0),
+            ProfileLevel(39, 0),
+            ProfileLevel(20, 0),
+            ProfileLevel(42, 0),
+        };
+
+        audioCaps = AudioCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<AudioCapabilities> audioCaps;
+};
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_Bitrate) {
+    const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+    EXPECT_EQ(bitrateRange.lower(), 8000) << "bitrate range lower does not match: "
+            << bitrateRange.lower();
+    EXPECT_EQ(bitrateRange.upper(), 510000) << "bitrate range upper does not match: "
+            << bitrateRange.upper();
+}
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_InputChannelCount) {
+    int maxInputChannelCount = audioCaps->getMaxInputChannelCount();
+    EXPECT_EQ(maxInputChannelCount, 8);
+    int minInputChannelCount = audioCaps->getMinInputChannelCount();
+    EXPECT_EQ(minInputChannelCount, 1);
+}
+
+TEST_F(AudioCapsAacTest, AudioCaps_Aac_SupportedSampleRates) {
+    const std::vector<int>& sampleRates = audioCaps->getSupportedSampleRates();
+    EXPECT_EQ(sampleRates, std::vector<int>({7350, 8000, 11025, 12000, 16000, 22050,
+            24000, 32000, 44100, 48000}));
+
+    EXPECT_FALSE(audioCaps->isSampleRateSupported(6000))
+            << "isSampleRateSupported returned true for unsupported sample rate";
+    EXPECT_TRUE(audioCaps->isSampleRateSupported(8000))
+            << "isSampleRateSupported returned false for supported sample rate";
+    EXPECT_TRUE(audioCaps->isSampleRateSupported(12000))
+            << "isSampleRateSupported returned false for supported sample rate";
+    EXPECT_FALSE(audioCaps->isSampleRateSupported(44000))
+            << "isSampleRateSupported returned true for unsupported sample rate";
+    EXPECT_TRUE(audioCaps->isSampleRateSupported(48000))
+            << "isSampleRateSupported returned false for supported sample rate";
+}
+
+class AudioCapsRawTest : public testing::Test {
+protected:
+    AudioCapsRawTest() {
+        std::string mediaType = MIMETYPE_AUDIO_RAW;
+
+        sp<AMessage> details = new AMessage;
+        details->setString("bitrate-range", "1-10000000");
+        details->setString("channel-ranges", "1,2,3,4,5,6,7,8,9,10,11,12");
+        details->setString("sample-rate-ranges", "8000-192000");
+
+        std::vector<ProfileLevel> profileLevel;
+
+        audioCaps = AudioCapabilities::Create(mediaType, profileLevel, details);
+    }
+
+    std::shared_ptr<AudioCapabilities> audioCaps;
+};
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_Bitrate) {
+    const Range<int>& bitrateRange = audioCaps->getBitrateRange();
+    EXPECT_EQ(bitrateRange.lower(), 1);
+    EXPECT_EQ(bitrateRange.upper(), 10000000);
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_InputChannelCount) {
+    int maxInputChannelCount = audioCaps->getMaxInputChannelCount();
+    EXPECT_EQ(maxInputChannelCount, 12);
+    int minInputChannelCount = audioCaps->getMinInputChannelCount();
+    EXPECT_EQ(minInputChannelCount, 1);
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_InputChannelCountRanges) {
+    const std::vector<Range<int>>& inputChannelCountRanges
+            = audioCaps->getInputChannelCountRanges();
+    std::vector<Range<int>> expectedOutput({{1,1}, {2,2}, {3,3}, {4,4}, {5,5},
+            {6,6}, {7,7}, {8,8}, {9,9}, {10,10}, {11,11}, {12,12}});
+    ASSERT_EQ(inputChannelCountRanges.size(), expectedOutput.size());
+    for (int i = 0; i < inputChannelCountRanges.size(); i++) {
+        EXPECT_EQ(inputChannelCountRanges.at(i).lower(), expectedOutput.at(i).lower());
+        EXPECT_EQ(inputChannelCountRanges.at(i).upper(), expectedOutput.at(i).upper());
+    }
+}
+
+TEST_F(AudioCapsRawTest, AudioCaps_Raw_SupportedSampleRates) {
+    const std::vector<Range<int>>& sampleRateRanges = audioCaps->getSupportedSampleRateRanges();
+    EXPECT_EQ(sampleRateRanges.size(), 1);
+    EXPECT_EQ(sampleRateRanges.at(0).lower(), 8000);
+    EXPECT_EQ(sampleRateRanges.at(0).upper(), 192000);
+
+    EXPECT_EQ(audioCaps->isSampleRateSupported(7000), false);
+    EXPECT_EQ(audioCaps->isSampleRateSupported(10000), true);
+    EXPECT_EQ(audioCaps->isSampleRateSupported(193000), false);
+}
diff --git a/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
index cc60933..f0db018 100644
--- a/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
+++ b/media/libmedia/tests/mediaplayer/IMediaPlayerTest.cpp
@@ -50,7 +50,7 @@
 
     // We write a length greater than the following session id array. Should be discarded.
     data.writeUint32(2);
-    data.writeUnpadded(kMockByteArray, 1);
+    data.write(kMockByteArray, 1);
 
     status_t result = IMediaPlayer::asBinder(iMediaPlayer_)
             ->transact(PREPARE_DRM, data, &reply);
diff --git a/media/libmediahelper/Android.bp b/media/libmediahelper/Android.bp
index 649f813..b5867a6 100644
--- a/media/libmediahelper/Android.bp
+++ b/media/libmediahelper/Android.bp
@@ -30,9 +30,6 @@
     name: "libmedia_helper",
     vendor_available: true,
     min_sdk_version: "29",
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: [
         "AudioParameter.cpp",
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index 26aa375..98c3382 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -213,6 +213,7 @@
                                                              // format to transport packets.
                                                              // Raw byte streams are used if this
                                                              // is false.
+#define AMEDIAMETRICS_PROP_TOSTRING "toString"             // string
 #define AMEDIAMETRICS_PROP_TOTALINPUTBYTES "totalInputBytes" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_TOTALOUTPUTBYTES "totalOutputBytes" // int32 (MIDI)
 #define AMEDIAMETRICS_PROP_THREADID       "threadId"       // int32 value io handle
@@ -243,6 +244,7 @@
 // Values are strings accepted for a given property.
 
 // An event is a general description, which often is a function name.
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_APPLYVOLUMESHAPER "applyVolumeShaper"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_BEGINAUDIOINTERVALGROUP "beginAudioIntervalGroup"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CLOSE      "close"
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE     "create"
@@ -265,6 +267,8 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID  "setLogSessionId" // AudioTrack, Record
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID "setPlayerIId" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPREFERREDDEVICE "setPreferredDevice" // AudioTrack, Record
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSAMPLERATE "setSampleRate" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD "setStartThreshold" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME   "setVoiceVolume" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME  "setVolume"  // AudioTrack
diff --git a/media/libmediaplayerservice/Android.bp b/media/libmediaplayerservice/Android.bp
index 718f782..a10c509 100644
--- a/media/libmediaplayerservice/Android.bp
+++ b/media/libmediaplayerservice/Android.bp
@@ -45,6 +45,7 @@
         "android.hardware.media.omx@1.0",
         "av-types-aidl-cpp",
         "framework-permission-aidl-cpp",
+        "libaconfig_storage_read_api_cc",
         "libaudioclient_aidl_conversion",
         "libbase",
         "libbinder_ndk",
@@ -76,6 +77,7 @@
         "libstagefright_httplive",
         "libutils",
         "packagemanager_aidl-cpp",
+        "server_configurable_flags",
     ],
 
     header_libs: [
@@ -86,6 +88,7 @@
     ],
 
     static_libs: [
+        "com.android.media.flags.editing-aconfig-cc",
         "libplayerservice_datasource",
         "libstagefright_nuplayer",
         "libstagefright_rtsp",
diff --git a/media/libmediaplayerservice/DeathNotifier.cpp b/media/libmediaplayerservice/DeathNotifier.cpp
index ab22f67..241c52d 100644
--- a/media/libmediaplayerservice/DeathNotifier.cpp
+++ b/media/libmediaplayerservice/DeathNotifier.cpp
@@ -17,11 +17,18 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "MediaPlayerService-DeathNotifier"
 #include <android-base/logging.h>
+#include <map>
 
 #include "DeathNotifier.h"
 
 namespace android {
 
+// Only dereference a cookie if it is still registered in this map.
+// Only used with the NDK binder backend.
+static uintptr_t sCookieKeyCounter = 0;
+static std::map<uintptr_t, wp<DeathNotifier::DeathRecipient>> sCookies;
+static std::mutex sCookiesMutex;
+
 class DeathNotifier::DeathRecipient :
         public IBinder::DeathRecipient,
         public hardware::hidl_death_recipient {
@@ -44,13 +51,32 @@
     }
 
     static void OnBinderDied(void *cookie) {
-        DeathRecipient *thiz = (DeathRecipient *)cookie;
-        thiz->mNotify();
+        std::unique_lock<std::mutex> guard(sCookiesMutex);
+        if (auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie)); it != sCookies.end()) {
+            sp<DeathRecipient> recipient = it->second.promote();
+            sCookies.erase(it);
+            guard.unlock();
+
+            if (recipient) {
+                LOG(INFO) << "Notifying DeathRecipient from OnBinderDied.";
+                recipient->mNotify();
+            } else {
+                LOG(INFO) <<
+                    "Tried to notify DeathRecipient from OnBinderDied but could not promote.";
+            }
+        }
     }
 
     AIBinder_DeathRecipient *getNdkRecipient() {
         return mNdkRecipient.get();;
     }
+    ~DeathRecipient() {
+        // The lock must be held so this object is not used in OnBinderDied while being destroyed.
+        std::lock_guard<std::mutex> guard(sCookiesMutex);
+        sCookies.erase(mCookieKey);
+    }
+
+    uintptr_t mCookieKey;
 
 private:
     Notify mNotify;
@@ -73,8 +99,15 @@
       : mService{std::in_place_index<3>, service},
         mDeathRecipient{new DeathRecipient(notify)} {
     mDeathRecipient->initNdk();
+    {
+        std::lock_guard<std::mutex> guard(sCookiesMutex);
+        mDeathRecipient->mCookieKey = sCookieKeyCounter++;
+        sCookies[mDeathRecipient->mCookieKey] = mDeathRecipient;
+    }
     AIBinder_linkToDeath(
-            service.get(), mDeathRecipient->getNdkRecipient(), mDeathRecipient.get());
+            service.get(),
+            mDeathRecipient->getNdkRecipient(),
+            reinterpret_cast<void*>(mDeathRecipient->mCookieKey));
 }
 
 DeathNotifier::DeathNotifier(DeathNotifier&& other)
@@ -94,10 +127,11 @@
         std::get<2>(mService)->unlinkToDeath(mDeathRecipient);
         break;
     case 3:
+
         AIBinder_unlinkToDeath(
                 std::get<3>(mService).get(),
                 mDeathRecipient->getNdkRecipient(),
-                mDeathRecipient.get());
+                reinterpret_cast<void*>(mDeathRecipient->mCookieKey));
         break;
     default:
         CHECK(false) << "Corrupted service type during destruction.";
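To summarize the pattern introduced above in isolation: the NDK death-recipient cookie is now an opaque key into a mutex-guarded registry of weak pointers, so a late OnBinderDied callback can only reach recipients that are still registered. A minimal standalone sketch follows; the class and member names (CookieRegistry, Entry) are illustrative and not part of this change.

```cpp
#include <functional>
#include <map>
#include <mutex>

#include <utils/RefBase.h>

namespace {

struct Entry : public android::RefBase {
    std::function<void()> onDied;
};

class CookieRegistry {
  public:
    // Register an entry and hand back the opaque key to use as the binder cookie.
    uintptr_t add(const android::wp<Entry>& entry) {
        std::lock_guard<std::mutex> guard(mMutex);
        const uintptr_t key = mNextKey++;
        mEntries[key] = entry;
        return key;
    }

    // Called from the entry's destructor / unlink path.
    void remove(uintptr_t key) {
        std::lock_guard<std::mutex> guard(mMutex);
        mEntries.erase(key);
    }

    // Called from the binder-died callback with the cookie cast back to a key.
    void notify(uintptr_t key) {
        std::unique_lock<std::mutex> guard(mMutex);
        const auto it = mEntries.find(key);
        if (it == mEntries.end()) return;  // already unlinked or destroyed
        android::sp<Entry> strong = it->second.promote();
        mEntries.erase(it);
        guard.unlock();                    // do not hold the lock in user code
        if (strong != nullptr) strong->onDied();
    }

  private:
    std::mutex mMutex;
    uintptr_t mNextKey = 0;
    std::map<uintptr_t, android::wp<Entry>> mEntries;
};

}  // namespace
```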
diff --git a/media/libmediaplayerservice/DeathNotifier.h b/media/libmediaplayerservice/DeathNotifier.h
index 24e45a3..0fd7c65 100644
--- a/media/libmediaplayerservice/DeathNotifier.h
+++ b/media/libmediaplayerservice/DeathNotifier.h
@@ -37,10 +37,11 @@
     DeathNotifier(DeathNotifier&& other);
     ~DeathNotifier();
 
+    class DeathRecipient;
+
 private:
     std::variant<std::monostate, sp<IBinder>, sp<HBase>, ::ndk::SpAIBinder> mService;
 
-    class DeathRecipient;
     sp<DeathRecipient> mDeathRecipient;
 };
 
diff --git a/media/libmediaplayerservice/StagefrightRecorder.cpp b/media/libmediaplayerservice/StagefrightRecorder.cpp
index 3ab32f0..086baa3 100644
--- a/media/libmediaplayerservice/StagefrightRecorder.cpp
+++ b/media/libmediaplayerservice/StagefrightRecorder.cpp
@@ -68,6 +68,7 @@
 #include <system/audio.h>
 
 #include <media/stagefright/rtsp/ARTPWriter.h>
+#include <com_android_media_editing_flags.h>
 
 namespace android {
 
@@ -1334,10 +1335,10 @@
         // cause out-of-memory due to large input buffer size. And audio recording
         // probably doesn't make sense in the scenario, since the slow-down factor
         // is probably huge (eg. mSampleRate=48K, mCaptureFps=240, mFrameRate=1).
-        const static int32_t SAMPLE_RATE_HZ_MAX = 192000;
+        const static int32_t kSampleRateHzMax = 192000;
         sourceSampleRate =
                 (mSampleRate * mCaptureFps + mFrameRate / 2) / mFrameRate;
-        if (sourceSampleRate < mSampleRate || sourceSampleRate > SAMPLE_RATE_HZ_MAX) {
+        if (sourceSampleRate < mSampleRate || sourceSampleRate > kSampleRateHzMax) {
             ALOGE("source sample rate out of range! "
                     "(mSampleRate %d, mCaptureFps %.2f, mFrameRate %d",
                     mSampleRate, mCaptureFps, mFrameRate);
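For the scenario cited in the comment above (mSampleRate = 48000, mCaptureFps = 240, mFrameRate = 1), the formula gives sourceSampleRate = (48000 * 240) / 1 = 11,520,000 Hz, far above kSampleRateHzMax (192000), so the request is rejected with the ALOGE shown rather than attempting such an extreme slow-down.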
@@ -2119,6 +2120,12 @@
 
     if (tsLayers > 1) {
         uint32_t bLayers = std::min(2u, tsLayers - 1); // use up-to 2 B-layers
+        // TODO(b/341121900): Remove this once B frames are handled correctly in the screen
+        // recorder use case when the mic is the only audio source.
+        if (!com::android::media::editing::flags::stagefrightrecorder_enable_b_frames()
+                && mAudioSource == AUDIO_SOURCE_MIC && mVideoSource == VIDEO_SOURCE_SURFACE) {
+            bLayers = 0;
+        }
         uint32_t pLayers = tsLayers - bLayers;
         format->setString(
                 "ts-schema", AStringPrintf("android.generic.%u+%u", pLayers, bLayers));
diff --git a/media/libmediaplayerservice/fuzzer/Android.bp b/media/libmediaplayerservice/fuzzer/Android.bp
index 74b0a85..fcdaff9 100644
--- a/media/libmediaplayerservice/fuzzer/Android.bp
+++ b/media/libmediaplayerservice/fuzzer/Android.bp
@@ -44,7 +44,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback+bugs@google.com",
         ],
         componentid: 155276,
         hotlists: [
@@ -64,6 +64,7 @@
         "mediarecorder_fuzzer.cpp",
     ],
     defaults: [
+        "libaudioflinger_dependencies",
         "libmediaplayerserviceFuzzer_defaults",
     ],
     static_libs: [
@@ -76,12 +77,10 @@
     ],
     shared_libs: [
         "android.hardware.media.omx@1.0",
-        "av-types-aidl-cpp",
         "media_permission-aidl-cpp",
-        "libaudioclient_aidl_conversion",
         "libactivitymanager_aidl",
         "libandroid_net",
-        "libaudioclient",
+        "libaudioflinger",
         "libcamera_client",
         "libcodec2_client",
         "libcrypto",
@@ -89,24 +88,13 @@
         "libdrmframework",
         "libgui",
         "libhidlbase",
-        "liblog",
         "libmedia_codeclist",
         "libmedia_omx",
         "libmediadrm",
-        "libmediametrics",
-        "libmediautils",
-        "libmemunreachable",
         "libnetd_client",
-        "libpowermanager",
         "libstagefright_httplive",
         "packagemanager_aidl-cpp",
         "libfakeservicemanager",
-        "libvibrator",
-        "libnbaio",
-        "libnblog",
-        "libpowermanager",
-        "libaudioprocessing",
-        "libaudioflinger",
         "libresourcemanagerservice",
         "libmediametricsservice",
         "mediametricsservice-aidl-cpp",
@@ -122,10 +110,6 @@
         "android.hardware.camera.device@3.4",
         "libaudiohal@7.0",
     ],
-    header_libs: [
-        "libaudiohal_headers",
-        "libaudioflinger_headers",
-    ],
 }
 
 cc_fuzz {
@@ -141,7 +125,6 @@
         "libplayerservice_datasource",
     ],
     shared_libs: [
-        "libmediaplayerservice",
         "libdatasource",
         "libdrmframework",
         "libstagefright_httplive",
@@ -159,10 +142,13 @@
         "libmediaplayerserviceFuzzer_defaults",
     ],
     static_libs: [
+        "libgmock",
+        "libgtest_ndk_c++",
         "libplayerservice_datasource",
         "libstagefright_nuplayer",
         "libstagefright_rtsp",
         "libstagefright_timedtext",
+        "libbinder_random_parcel",
     ],
     shared_libs: [
         "android.hardware.media.c2@1.0",
@@ -191,7 +177,10 @@
         "libpowermanager",
         "libstagefright_httplive",
         "libaudiohal@7.0",
+        "libmediaextractorservice",
     ],
+    corpus: ["corpus/*"],
+    include_dirs: ["frameworks/av/services/mediaextractor"],
 }
 
 cc_fuzz {
diff --git a/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204 b/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204
new file mode 100755
index 0000000..13e4732
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/08873afe0bb32c29d6aed741d06c3ebfcfcf6204
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f b/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f
new file mode 100755
index 0000000..591816e
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2c1e844c2b86f22075a69121efd280af3104e31f
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4 b/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4
new file mode 100755
index 0000000..2acf349
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2e5297399ed949e919852cb0471cab25bcca82f4
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8 b/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8
new file mode 100755
index 0000000..941885f
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/2f9bd1fce3b3422e31f2f5bb38db695e2ace7bb8
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa b/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa
new file mode 100755
index 0000000..a6920fa
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/3f967d82b89c0b39e04727213ba71910801b85fa
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6 b/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6
new file mode 100755
index 0000000..6b70ddd
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/40a46cd4f323d9d5d8508188f21f94ddae5bfae6
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622 b/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622
new file mode 100755
index 0000000..a919290
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/543adcc7136463c859c38fce066f87b1141bd622
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1 b/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1
new file mode 100755
index 0000000..1062677
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/874152a3db53b5e4c153655e8ee9443e6ec4c0b1
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075 b/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075
new file mode 100755
index 0000000..ed11aff
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/bf37374209d5417fa2a6a1cddcae6df44397b075
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0 b/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0
new file mode 100755
index 0000000..d82f45d
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/d537f5df0726fa840146a4c788855f8658b7aae0
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607 b/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607
new file mode 100755
index 0000000..32af6ee
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/e66a979713ba719fc30005b770b627c187a64607
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112 b/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112
new file mode 100755
index 0000000..abfba79
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/e712688b60610ff05d48b7b74695993a05338112
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324 b/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324
new file mode 100755
index 0000000..7fb1bca
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/f5c60a210c8fbd5a9a3c131f6097d5d07fc45324
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0 b/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..aae78ae
--- /dev/null
+++ b/media/libmediaplayerservice/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
index a189d04..15265bf 100644
--- a/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
+++ b/media/libmediaplayerservice/fuzzer/mediaplayer_fuzzer.cpp
@@ -15,9 +15,13 @@
  *
  */
 
+#include <MediaExtractorService.h>
 #include <MediaPlayerService.h>
+#include <android/gui/BnSurfaceComposerClient.h>
 #include <camera/Camera.h>
 #include <datasource/FileSource.h>
+#include <fuzzbinder/random_binder.h>
+#include <gmock/gmock.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 #include <media/IMediaCodecList.h>
@@ -31,40 +35,100 @@
 #include <media/stagefright/RemoteDataSource.h>
 #include <media/stagefright/foundation/base64.h>
 #include <thread>
+#include "android-base/stringprintf.h"
 #include "fuzzer/FuzzedDataProvider.h"
-
-constexpr int32_t kUuidSize = 16;
-constexpr int32_t kMaxSleepTimeInMs = 100;
-constexpr int32_t kMinSleepTimeInMs = 0;
-constexpr int32_t kPlayCountMin = 1;
-constexpr int32_t kPlayCountMax = 10;
-constexpr int32_t kMaxDimension = 8192;
-constexpr int32_t kMinDimension = 0;
-
 using namespace std;
 using namespace android;
 
-constexpr audio_session_t kSupportedAudioSessions[] = {
-    AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_STAGE, AUDIO_SESSION_OUTPUT_MIX};
+constexpr int32_t kUuidSize = 16;
+constexpr int32_t kMinSize = 0;
+constexpr int32_t kMaxSize = 100;
+constexpr int32_t kFourCCVal = android::FOURCC('m', 't', 'r', 'X');
+constexpr int32_t kFlagVal =
+        ISurfaceComposerClient::eCursorWindow | ISurfaceComposerClient::eOpaque;
 
-constexpr audio_timestretch_stretch_mode_t kAudioStretchModes[] = {
-    AUDIO_TIMESTRETCH_STRETCH_DEFAULT, AUDIO_TIMESTRETCH_STRETCH_VOICE};
+const char dumpFile[] = "OutputDumpFile";
 
-constexpr audio_timestretch_fallback_mode_t kAudioFallbackModes[] = {
-    AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT, AUDIO_TIMESTRETCH_FALLBACK_DEFAULT,
-    AUDIO_TIMESTRETCH_FALLBACK_MUTE, AUDIO_TIMESTRETCH_FALLBACK_FAIL};
+enum DataSourceType { HTTP, FD, STREAM, FILETYPE, SOCKET, kMaxValue = SOCKET };
+
+constexpr audio_flags_mask_t kAudioFlagsMasks[] = {AUDIO_FLAG_NONE,
+                                                   AUDIO_FLAG_AUDIBILITY_ENFORCED,
+                                                   AUDIO_FLAG_SECURE,
+                                                   AUDIO_FLAG_SCO,
+                                                   AUDIO_FLAG_BEACON,
+                                                   AUDIO_FLAG_HW_AV_SYNC,
+                                                   AUDIO_FLAG_HW_HOTWORD,
+                                                   AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY,
+                                                   AUDIO_FLAG_BYPASS_MUTE,
+                                                   AUDIO_FLAG_LOW_LATENCY,
+                                                   AUDIO_FLAG_DEEP_BUFFER,
+                                                   AUDIO_FLAG_NO_MEDIA_PROJECTION,
+                                                   AUDIO_FLAG_MUTE_HAPTIC,
+                                                   AUDIO_FLAG_NO_SYSTEM_CAPTURE,
+                                                   AUDIO_FLAG_CAPTURE_PRIVATE,
+                                                   AUDIO_FLAG_CONTENT_SPATIALIZED,
+                                                   AUDIO_FLAG_NEVER_SPATIALIZE,
+                                                   AUDIO_FLAG_CALL_REDIRECTION};
+
+constexpr audio_content_type_t kAudioContentTypes[] = {
+        AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_CONTENT_TYPE_SPEECH,       AUDIO_CONTENT_TYPE_MUSIC,
+        AUDIO_CONTENT_TYPE_MOVIE,   AUDIO_CONTENT_TYPE_SONIFICATION, AUDIO_CONTENT_TYPE_ULTRASOUND};
+
+constexpr audio_source_t kAudioSources[] = {AUDIO_SOURCE_INVALID,
+                                            AUDIO_SOURCE_DEFAULT,
+                                            AUDIO_SOURCE_MIC,
+                                            AUDIO_SOURCE_VOICE_UPLINK,
+                                            AUDIO_SOURCE_VOICE_DOWNLINK,
+                                            AUDIO_SOURCE_VOICE_CALL,
+                                            AUDIO_SOURCE_CAMCORDER,
+                                            AUDIO_SOURCE_VOICE_RECOGNITION,
+                                            AUDIO_SOURCE_VOICE_COMMUNICATION,
+                                            AUDIO_SOURCE_REMOTE_SUBMIX,
+                                            AUDIO_SOURCE_UNPROCESSED,
+                                            AUDIO_SOURCE_VOICE_PERFORMANCE,
+                                            AUDIO_SOURCE_ECHO_REFERENCE,
+                                            AUDIO_SOURCE_FM_TUNER,
+                                            AUDIO_SOURCE_HOTWORD,
+                                            AUDIO_SOURCE_ULTRASOUND};
+
+constexpr audio_usage_t kAudioUsages[] = {AUDIO_USAGE_UNKNOWN,
+                                          AUDIO_USAGE_MEDIA,
+                                          AUDIO_USAGE_VOICE_COMMUNICATION,
+                                          AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+                                          AUDIO_USAGE_ALARM,
+                                          AUDIO_USAGE_NOTIFICATION,
+                                          AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+                                          AUDIO_USAGE_NOTIFICATION_COMMUNICATION_REQUEST,
+                                          AUDIO_USAGE_NOTIFICATION_COMMUNICATION_INSTANT,
+                                          AUDIO_USAGE_NOTIFICATION_COMMUNICATION_DELAYED,
+                                          AUDIO_USAGE_NOTIFICATION_EVENT,
+                                          AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
+                                          AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE,
+                                          AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+                                          AUDIO_USAGE_GAME,
+                                          AUDIO_USAGE_VIRTUAL_SOURCE,
+                                          AUDIO_USAGE_ASSISTANT,
+                                          AUDIO_USAGE_CALL_ASSISTANT,
+                                          AUDIO_USAGE_EMERGENCY,
+                                          AUDIO_USAGE_SAFETY,
+                                          AUDIO_USAGE_VEHICLE_STATUS,
+                                          AUDIO_USAGE_ANNOUNCEMENT};
+
+constexpr PixelFormat kPixelFormat[] = {
+        PIXEL_FORMAT_UNKNOWN,       PIXEL_FORMAT_NONE,        PIXEL_FORMAT_CUSTOM,
+        PIXEL_FORMAT_TRANSLUCENT,   PIXEL_FORMAT_TRANSPARENT, PIXEL_FORMAT_OPAQUE,
+        PIXEL_FORMAT_RGBA_8888,     PIXEL_FORMAT_RGBX_8888,   PIXEL_FORMAT_RGB_888,
+        PIXEL_FORMAT_RGB_565,       PIXEL_FORMAT_BGRA_8888,   PIXEL_FORMAT_RGBA_5551,
+        PIXEL_FORMAT_RGBA_4444,     PIXEL_FORMAT_RGBA_FP16,   PIXEL_FORMAT_RGBA_1010102,
+        PIXEL_FORMAT_R_8,           PIXEL_FORMAT_R_16_UINT,   PIXEL_FORMAT_RG_1616_UINT,
+        PIXEL_FORMAT_RGBA_10101010,
+};
 
 constexpr media_parameter_keys kMediaParamKeys[] = {
     KEY_PARAMETER_CACHE_STAT_COLLECT_FREQ_MS, KEY_PARAMETER_AUDIO_CHANNEL_COUNT,
     KEY_PARAMETER_PLAYBACK_RATE_PERMILLE, KEY_PARAMETER_AUDIO_ATTRIBUTES,
     KEY_PARAMETER_RTP_ATTRIBUTES};
 
-constexpr audio_stream_type_t kAudioStreamTypes[] = {
-    AUDIO_STREAM_DEFAULT,      AUDIO_STREAM_VOICE_CALL,    AUDIO_STREAM_SYSTEM,
-    AUDIO_STREAM_RING,         AUDIO_STREAM_MUSIC,         AUDIO_STREAM_ALARM,
-    AUDIO_STREAM_NOTIFICATION, AUDIO_STREAM_BLUETOOTH_SCO, AUDIO_STREAM_ENFORCED_AUDIBLE,
-    AUDIO_STREAM_DTMF,         AUDIO_STREAM_TTS,           AUDIO_STREAM_ASSISTANT};
-
 constexpr media_event_type kMediaEventTypes[] = {MEDIA_NOP,
                                                  MEDIA_PREPARED,
                                                  MEDIA_PLAYBACK_COMPLETE,
@@ -140,9 +204,26 @@
     DISALLOW_EVIL_CONSTRUCTORS(TestMediaHTTPService);
 };
 
-class BinderDeathNotifier : public IBinder::DeathRecipient {
-   public:
-    void binderDied(const wp<IBinder> &) { abort(); }
+class FakeBnSurfaceComposerClient : public gui::BnSurfaceComposerClient {
+  public:
+    MOCK_METHOD(binder::Status, createSurface,
+                (const std::string& name, int32_t flags, const sp<IBinder>& parent,
+                 const gui::LayerMetadata& metadata, gui::CreateSurfaceResult* outResult),
+                (override));
+
+    MOCK_METHOD(binder::Status, clearLayerFrameStats, (const sp<IBinder>& handle), (override));
+
+    MOCK_METHOD(binder::Status, getLayerFrameStats,
+                (const sp<IBinder>& handle, gui::FrameStats* outStats), (override));
+
+    MOCK_METHOD(binder::Status, mirrorSurface,
+                (const sp<IBinder>& mirrorFromHandle, gui::CreateSurfaceResult* outResult),
+                (override));
+
+    MOCK_METHOD(binder::Status, mirrorDisplay,
+                (int64_t displayId, gui::CreateSurfaceResult* outResult), (override));
+
+    MOCK_METHOD(binder::Status, getSchedulingPolicy, (gui::SchedulingPolicy*), (override));
 };
 
 class MediaPlayerServiceFuzzer {
@@ -153,24 +234,40 @@
     void process(const uint8_t *data, size_t size);
 
    private:
-    bool setDataSource(const uint8_t *data, size_t size);
-    void invokeMediaPlayer();
-    FuzzedDataProvider mFdp;
-    sp<IMediaPlayer> mMediaPlayer = nullptr;
-    sp<IMediaPlayerClient> mMediaPlayerClient = nullptr;
-    const int32_t mDataSourceFd;
+     FuzzedDataProvider mFdp;
+     const int32_t mDataSourceFd;
+     sp<IMediaPlayer> mMediaPlayer = nullptr;
+     sp<IMediaPlayerClient> mMediaPlayerClient = nullptr;
+     void invokeMediaPlayer();
+     sp<SurfaceControl> makeSurfaceControl();
+     bool setDataSource(const uint8_t* data, size_t size);
 };
 
-bool MediaPlayerServiceFuzzer::setDataSource(const uint8_t *data, size_t size) {
-    status_t status = -1;
-    enum DataSourceType {http, fd, stream, file, socket, kMaxValue = socket};
-    switch (mFdp.ConsumeEnum<DataSourceType>()) {
-        case http: {
+sp<SurfaceControl> MediaPlayerServiceFuzzer::makeSurfaceControl() {
+     sp<IBinder> handle = getRandomBinder(&mFdp);
+     const sp<FakeBnSurfaceComposerClient> testClient(new FakeBnSurfaceComposerClient());
+     sp<SurfaceComposerClient> client = new SurfaceComposerClient(testClient);
+     uint32_t width = mFdp.ConsumeIntegral<uint32_t>();
+     uint32_t height = mFdp.ConsumeIntegral<uint32_t>();
+     uint32_t transformHint = mFdp.ConsumeIntegral<uint32_t>();
+     uint32_t flags = mFdp.ConsumeBool() ? kFlagVal : mFdp.ConsumeIntegral<uint32_t>();
+     int32_t format = mFdp.ConsumeBool() ? mFdp.ConsumeIntegral<uint32_t>()
+                                         : mFdp.PickValueInArray(kPixelFormat);
+     int32_t layerId = mFdp.ConsumeIntegral<int32_t>();
+     std::string layerName = android::base::StringPrintf("#%d", layerId);
+     return new SurfaceControl(client, handle, layerId, layerName, width, height, format,
+                               transformHint, flags);
+}
+
+bool MediaPlayerServiceFuzzer::setDataSource(const uint8_t* data, size_t size) {
+     status_t status = UNKNOWN_ERROR;
+     switch (mFdp.ConsumeEnum<DataSourceType>()) {
+        case HTTP: {
             KeyedVector<String8, String8> headers;
             headers.add(String8(mFdp.ConsumeRandomLengthString().c_str()),
                         String8(mFdp.ConsumeRandomLengthString().c_str()));
 
-            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(0, size);
+            uint32_t dataBlobSize = mFdp.ConsumeIntegralInRange<uint16_t>(kMinSize, size);
             vector<uint8_t> uriSuffix = mFdp.ConsumeBytes<uint8_t>(dataBlobSize);
 
             string uri(mFdp.PickValueInArray(kUrlPrefix));
@@ -183,18 +280,17 @@
                     mMediaPlayer->setDataSource(testService /*httpService*/, uri.c_str(), &headers);
             break;
         }
-        case fd: {
+        case FD: {
             write(mDataSourceFd, data, size);
-
             status = mMediaPlayer->setDataSource(mDataSourceFd, 0, size);
             break;
         }
-        case stream: {
+        case STREAM: {
             sp<IStreamSource> streamSource = sp<TestStreamSource>::make();
             status = mMediaPlayer->setDataSource(streamSource);
             break;
         }
-        case file: {
+        case FILETYPE: {
             write(mDataSourceFd, data, size);
 
             sp<DataSource> dataSource = new FileSource(dup(mDataSourceFd), 0, size);
@@ -205,7 +301,7 @@
             status = mMediaPlayer->setDataSource(iDataSource);
             break;
         }
-        case socket: {
+        case SOCKET: {
             String8 rtpParams = String8(mFdp.ConsumeRandomLengthString().c_str());
             struct sockaddr_in endpoint;
             endpoint.sin_family = mFdp.ConsumeIntegral<unsigned short>();
@@ -214,190 +310,250 @@
             status = mMediaPlayer->setDataSource(rtpParams);
             break;
         }
-    }
-
-    if (status != 0) {
+     }
+     if (status != OK) {
         return false;
-    }
-    return true;
+     }
+     return true;
 }
 
 void MediaPlayerServiceFuzzer::invokeMediaPlayer() {
-    sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
-    String8 name = String8(mFdp.ConsumeRandomLengthString().c_str());
-    uint32_t width = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
-    uint32_t height = mFdp.ConsumeIntegralInRange<uint32_t>(kMinDimension, kMaxDimension);
-    uint32_t pixelFormat = mFdp.ConsumeIntegral<int32_t>();
-    uint32_t flags = mFdp.ConsumeIntegral<int32_t>();
-    sp<SurfaceControl> surfaceControl =
-        composerClient->createSurface(name, width, height, pixelFormat, flags);
-    if (surfaceControl) {
-        sp<Surface> surface = surfaceControl->getSurface();
-        mMediaPlayer->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());
-    }
-
-    BufferingSettings buffering;
-    buffering.mInitialMarkMs = mFdp.ConsumeIntegral<int32_t>();
-    buffering.mResumePlaybackMarkMs = mFdp.ConsumeIntegral<int32_t>();
-    mMediaPlayer->setBufferingSettings(buffering);
-    mMediaPlayer->getBufferingSettings(&buffering);
-
-    mMediaPlayer->prepareAsync();
-    size_t playCount = mFdp.ConsumeIntegralInRange<size_t>(kPlayCountMin, kPlayCountMax);
-    for (size_t Idx = 0; Idx < playCount; ++Idx) {
-        mMediaPlayer->start();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mMediaPlayer->pause();
-        this_thread::sleep_for(chrono::milliseconds(
-            mFdp.ConsumeIntegralInRange<int32_t>(kMinSleepTimeInMs, kMaxSleepTimeInMs)));
-        mMediaPlayer->stop();
-    }
-    bool state;
-    mMediaPlayer->isPlaying(&state);
-
-    AudioPlaybackRate rate;
-    rate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
-    rate.mPitch = mFdp.ConsumeFloatingPoint<float>();
-    rate.mStretchMode = mFdp.PickValueInArray(kAudioStretchModes);
-    rate.mFallbackMode = mFdp.PickValueInArray(kAudioFallbackModes);
-    mMediaPlayer->setPlaybackSettings(rate);
-    mMediaPlayer->getPlaybackSettings(&rate);
-
-    AVSyncSettings *avSyncSettings = new AVSyncSettings();
-    float videoFpsHint = mFdp.ConsumeFloatingPoint<float>();
-    mMediaPlayer->setSyncSettings(*avSyncSettings, videoFpsHint);
-    mMediaPlayer->getSyncSettings(avSyncSettings, &videoFpsHint);
-    delete avSyncSettings;
-
-    mMediaPlayer->seekTo(mFdp.ConsumeIntegral<int32_t>());
-
-    int32_t msec;
-    mMediaPlayer->getCurrentPosition(&msec);
-    mMediaPlayer->getDuration(&msec);
-    mMediaPlayer->reset();
-
-    mMediaPlayer->notifyAt(mFdp.ConsumeIntegral<int64_t>());
-
-    mMediaPlayer->setAudioStreamType(mFdp.PickValueInArray(kAudioStreamTypes));
-    mMediaPlayer->setLooping(mFdp.ConsumeIntegral<int32_t>());
-    float left = mFdp.ConsumeFloatingPoint<float>();
-    float right = mFdp.ConsumeFloatingPoint<float>();
-    mMediaPlayer->setVolume(left, right);
-
-    Parcel request, reply;
-    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    request.setDataPosition(0);
-    mMediaPlayer->invoke(request, &reply);
-
-    Parcel filter;
-    filter.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    filter.setDataPosition(0);
-    mMediaPlayer->setMetadataFilter(filter);
-
-    bool updateOnly = mFdp.ConsumeBool();
-    bool applyFilter = mFdp.ConsumeBool();
-    mMediaPlayer->getMetadata(updateOnly, applyFilter, &reply);
-    mMediaPlayer->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>());
-    mMediaPlayer->attachAuxEffect(mFdp.ConsumeIntegral<int32_t>());
-
-    int32_t key = mFdp.PickValueInArray(kMediaParamKeys);
-    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    request.setDataPosition(0);
-    mMediaPlayer->setParameter(key, request);
-    key = mFdp.PickValueInArray(kMediaParamKeys);
-    mMediaPlayer->getParameter(key, &reply);
-
-    struct sockaddr_in endpoint;
-    mMediaPlayer->getRetransmitEndpoint(&endpoint);
-
-    AttributionSourceState attributionSource;
-    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
-    attributionSource.token = sp<BBinder>::make();
-    const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
-    sp<IMediaPlayer> mNextMediaPlayer = mpService->create(
-        mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
-    mMediaPlayer->setNextPlayer(mNextMediaPlayer);
-
-    const sp<media::VolumeShaper::Configuration> configuration =
-        sp<media::VolumeShaper::Configuration>::make();
-    const sp<media::VolumeShaper::Operation> operation = sp<media::VolumeShaper::Operation>::make();
-    mMediaPlayer->applyVolumeShaper(configuration, operation);
-
-    mMediaPlayer->getVolumeShaperState(mFdp.ConsumeIntegral<int32_t>());
-    uint8_t uuid[kUuidSize];
-    for (int32_t index = 0; index < kUuidSize; ++index) {
-        uuid[index] = mFdp.ConsumeIntegral<uint8_t>();
-    }
-    Vector<uint8_t> drmSessionId;
-    drmSessionId.push_back(mFdp.ConsumeIntegral<uint8_t>());
-    mMediaPlayer->prepareDrm(uuid, drmSessionId);
-    mMediaPlayer->releaseDrm();
-
-    audio_port_handle_t deviceId = mFdp.ConsumeIntegral<int32_t>();
-    mMediaPlayer->setOutputDevice(deviceId);
-    mMediaPlayer->getRoutedDeviceId(&deviceId);
-
-    mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool());
-
-    sp<MediaPlayer> mediaPlayer = (MediaPlayer *)mMediaPlayer.get();
-
-    int32_t msg = mFdp.PickValueInArray(kMediaEventTypes);
-    int32_t ext1 = mFdp.PickValueInArray(kMediaInfoTypes);
-    int32_t ext2 = mFdp.ConsumeIntegral<int32_t>();
-    Parcel obj;
-    obj.writeInt32(mFdp.ConsumeIntegral<int32_t>());
-    obj.setDataPosition(0);
-    mediaPlayer->notify(msg, ext1, ext2, &obj);
-
-    int32_t mediaPlayerDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
-    Vector<String16> args;
-    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
-    mediaPlayer->dump(mediaPlayerDumpFd, args);
-    close(mediaPlayerDumpFd);
-
-    mMediaPlayer->disconnect();
+     Parcel request, reply;
+     while (mFdp.remaining_bytes()) {
+        auto invokeMediaPlayerApi = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    sp<SurfaceControl> surfaceControl = makeSurfaceControl();
+                    if (surfaceControl) {
+                        sp<Surface> surface = surfaceControl->getSurface();
+                        mMediaPlayer->setVideoSurfaceTexture(surface->getIGraphicBufferProducer());
+                    }
+                },
+                [&]() {
+                    BufferingSettings buffering;
+                    buffering.mInitialMarkMs = mFdp.ConsumeIntegral<int32_t>();
+                    buffering.mResumePlaybackMarkMs = mFdp.ConsumeIntegral<int32_t>();
+                    mMediaPlayer->setBufferingSettings(buffering);
+                },
+                [&]() {
+                    BufferingSettings buffering;
+                    mMediaPlayer->getBufferingSettings(&buffering);
+                },
+                [&]() {
+                    mMediaPlayer->prepareAsync();
+                    this_thread::sleep_for(chrono::milliseconds(100));  // Time to post message
+                },
+                [&]() {
+                    mMediaPlayer->start();
+                    this_thread::sleep_for(chrono::milliseconds(100));  // Time to post message
+                },
+                [&]() {
+                    mMediaPlayer->pause();
+                    this_thread::sleep_for(chrono::milliseconds(100));  // Time to post message
+                },
+                [&]() { mMediaPlayer->stop(); },
+                [&]() {
+                    bool state;
+                    mMediaPlayer->isPlaying(&state);
+                },
+                [&]() {
+                    AudioPlaybackRate rate;
+                    rate.mSpeed = mFdp.ConsumeFloatingPoint<float>();
+                    rate.mPitch = mFdp.ConsumeFloatingPoint<float>();
+                    rate.mStretchMode = mFdp.ConsumeBool() ? AUDIO_TIMESTRETCH_STRETCH_DEFAULT
+                                                           : AUDIO_TIMESTRETCH_STRETCH_VOICE;
+                    rate.mFallbackMode =
+                            (audio_timestretch_fallback_mode_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                    AUDIO_TIMESTRETCH_FALLBACK_CUT_REPEAT,
+                                    AUDIO_TIMESTRETCH_FALLBACK_FAIL);
+                    mMediaPlayer->setPlaybackSettings(rate);
+                    mMediaPlayer->getPlaybackSettings(&rate);
+                },
+                [&]() {
+                    AVSyncSettings* avSyncSettings = new AVSyncSettings();
+                    float videoFpsHint = mFdp.ConsumeFloatingPoint<float>();
+                    mMediaPlayer->setSyncSettings(*avSyncSettings, videoFpsHint);
+                    delete avSyncSettings;
+                },
+                [&]() {
+                    AVSyncSettings* avSyncSettings = new AVSyncSettings();
+                    float videoFpsHint = 0;
+                    mMediaPlayer->getSyncSettings(avSyncSettings, &videoFpsHint);
+                    delete avSyncSettings;
+                },
+                [&]() { mMediaPlayer->seekTo(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    int32_t msec;
+                    mMediaPlayer->getCurrentPosition(&msec);
+                    mMediaPlayer->getDuration(&msec);
+                },
+                [&]() { mMediaPlayer->reset(); },
+                [&]() { mMediaPlayer->notifyAt(mFdp.ConsumeIntegral<uint64_t>()); },
+                [&]() {
+                    mMediaPlayer->setAudioStreamType(
+                            (audio_stream_type_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                    AUDIO_STREAM_VOICE_CALL, AUDIO_STREAM_CALL_ASSISTANT));
+                },
+                [&]() { mMediaPlayer->setLooping(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    mMediaPlayer->setVolume(mFdp.ConsumeFloatingPoint<float>() /* left */,
+                                            mFdp.ConsumeFloatingPoint<float>() /* right */);
+                },
+                [&]() {
+                    request.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    request.setDataPosition(0);
+                    mMediaPlayer->invoke(request, &reply);
+                },
+                [&]() {
+                    Parcel filter;
+                    filter.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    filter.setDataPosition(0);
+                    mMediaPlayer->setMetadataFilter(filter);
+                },
+                [&]() {
+                    mMediaPlayer->getMetadata(mFdp.ConsumeBool() /* updateOnly */,
+                                              mFdp.ConsumeBool() /* applyFilter */, &reply);
+                },
+                [&]() { mMediaPlayer->setAuxEffectSendLevel(mFdp.ConsumeFloatingPoint<float>()); },
+                [&]() { mMediaPlayer->attachAuxEffect(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    int32_t key = mFdp.PickValueInArray(kMediaParamKeys);
+                    request.writeInt32((audio_usage_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_USAGE_UNKNOWN, AUDIO_USAGE_ANNOUNCEMENT) /* usage */);
+                    request.writeInt32((audio_content_type_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_CONTENT_TYPE_UNKNOWN,
+                            AUDIO_CONTENT_TYPE_ULTRASOUND) /* content_type */);
+                    request.writeInt32((audio_source_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_SOURCE_INVALID, AUDIO_SOURCE_ULTRASOUND) /* source */);
+                    request.writeInt32((audio_flags_mask_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                            AUDIO_FLAG_NONE, AUDIO_FLAG_CALL_REDIRECTION) /* flags */);
+                    request.writeInt32(mFdp.ConsumeBool() /* hasFlattenedTag */);
+                    request.writeString16(
+                            String16((mFdp.ConsumeRandomLengthString()).c_str()) /* tags */);
+                    request.setDataPosition(0);
+                    mMediaPlayer->setParameter(key, request);
+                    key = mFdp.PickValueInArray(kMediaParamKeys);
+                    mMediaPlayer->getParameter(key, &reply);
+                },
+                [&]() {
+                    int32_t key =
+                            mFdp.ConsumeBool() ? kFourCCVal : mFdp.ConsumeIntegral<uint32_t>();
+                    mMediaPlayer->getParameter(key, &reply);
+                },
+                [&]() {
+                    struct sockaddr_in endpoint;
+                    mMediaPlayer->getRetransmitEndpoint(&endpoint);
+                },
+                [&]() {
+                    AttributionSourceState attributionSource;
+                    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+                    attributionSource.token = sp<BBinder>::make();
+                    const sp<IMediaPlayerService> mpService(
+                            IMediaDeathNotifier::getMediaPlayerService());
+                    audio_session_t audioSessionId =
+                            (audio_session_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                    AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_MIX);
+                    sp<IMediaPlayer> mNextMediaPlayer = mpService->create(
+                            mMediaPlayerClient, audioSessionId, attributionSource);
+                    mMediaPlayer->setNextPlayer(mNextMediaPlayer);
+                },
+                [&]() {
+                    const sp<media::VolumeShaper::Configuration> configuration =
+                            sp<media::VolumeShaper::Configuration>::make();
+                    const sp<media::VolumeShaper::Operation> operation =
+                            sp<media::VolumeShaper::Operation>::make();
+                    mMediaPlayer->applyVolumeShaper(configuration, operation);
+                },
+                [&]() { mMediaPlayer->getVolumeShaperState(mFdp.ConsumeIntegral<int32_t>()); },
+                [&]() {
+                    uint8_t uuid[kUuidSize];
+                    for (int32_t index = 0; index < kUuidSize; ++index) {
+                        uuid[index] = mFdp.ConsumeIntegral<uint8_t>();
+                    }
+                    Vector<uint8_t> drmSessionId;
+                    int32_t length = mFdp.ConsumeIntegralInRange<uint32_t>(kMinSize, kMaxSize);
+                    while (length--) {
+                        drmSessionId.push_back(mFdp.ConsumeIntegral<uint8_t>());
+                    }
+                    mMediaPlayer->prepareDrm(uuid, drmSessionId);
+                },
+                [&]() { mMediaPlayer->releaseDrm(); },
+                [&]() {
+                    audio_port_handle_t deviceId = mFdp.ConsumeIntegral<int32_t>();
+                    mMediaPlayer->setOutputDevice(deviceId);
+                },
+                [&]() {
+                    audio_port_handle_t deviceId;
+                    mMediaPlayer->getRoutedDeviceId(&deviceId);
+                },
+                [&]() { mMediaPlayer->enableAudioDeviceCallback(mFdp.ConsumeBool()); },
+                [&]() {
+                    sp<MediaPlayer> mediaPlayer = (MediaPlayer*)mMediaPlayer.get();
+                    Parcel obj;
+                    obj.writeInt32(mFdp.ConsumeIntegral<int32_t>());
+                    obj.setDataPosition(0);
+                    mediaPlayer->notify(mFdp.PickValueInArray(kMediaEventTypes) /* msg */,
+                                        mFdp.PickValueInArray(kMediaInfoTypes) /* ext1 */,
+                                        mFdp.ConsumeIntegral<int32_t>() /* ext2 */, &obj);
+                },
+                [&]() {
+                    sp<MediaPlayer> mediaPlayer = (MediaPlayer*)mMediaPlayer.get();
+                    int32_t mediaPlayerDumpFd = memfd_create(dumpFile, MFD_ALLOW_SEALING);
+                    Vector<String16> args;
+                    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+                    mediaPlayer->dump(mediaPlayerDumpFd, args);
+                    close(mediaPlayerDumpFd);
+                },
+                [&]() { mMediaPlayer->disconnect(); },
+        });
+        invokeMediaPlayerApi();
+    }
 }
 
-void MediaPlayerServiceFuzzer::process(const uint8_t *data, size_t size) {
-    MediaPlayerService::instantiate();
-
-    const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
-    if (!mpService) {
+void MediaPlayerServiceFuzzer::process(const uint8_t* data, size_t size) {
+    const sp<IMediaPlayerService> mpService(IMediaDeathNotifier::getMediaPlayerService());
+    if (!mpService) {
         return;
-    }
+    }
 
-    sp<IMediaCodecList> mediaCodecList = mpService->getCodecList();
+    sp<IMediaCodecList> mediaCodecList = mpService->getCodecList();
 
-    sp<IRemoteDisplayClient> remoteDisplayClient;
-    sp<IRemoteDisplay> remoteDisplay = mpService->listenForRemoteDisplay(
-        String16(mFdp.ConsumeRandomLengthString().c_str()) /*opPackageName*/, remoteDisplayClient,
-        String8(mFdp.ConsumeRandomLengthString().c_str()) /*iface*/);
+    sp<IRemoteDisplayClient> remoteDisplayClient;
+    sp<IRemoteDisplay> remoteDisplay = mpService->listenForRemoteDisplay(
+            String16(mFdp.ConsumeRandomLengthString().c_str()) /*opPackageName*/,
+            remoteDisplayClient, String8(mFdp.ConsumeRandomLengthString().c_str()) /*iface*/);
 
-    mpService->addBatteryData(mFdp.ConsumeIntegral<uint32_t>());
-    Parcel reply;
-    mpService->pullBatteryData(&reply);
+    mpService->addBatteryData(mFdp.ConsumeIntegral<uint32_t>());
+    Parcel reply;
+    mpService->pullBatteryData(&reply);
 
-    sp<MediaPlayerService> mediaPlayerService = (MediaPlayerService *)mpService.get();
-    AttributionSourceState attributionSource;
-    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
-    attributionSource.token = sp<BBinder>::make();
-    mMediaPlayer = mediaPlayerService->create(
-        mMediaPlayerClient, mFdp.PickValueInArray(kSupportedAudioSessions), attributionSource);
+    sp<MediaPlayerService> mediaPlayerService = (MediaPlayerService*)mpService.get();
+    AttributionSourceState attributionSource;
+    attributionSource.packageName = mFdp.ConsumeRandomLengthString().c_str();
+    attributionSource.token = sp<BBinder>::make();
+    mMediaPlayer =
+            mediaPlayerService->create(mMediaPlayerClient,
+                                        (audio_session_t)mFdp.ConsumeIntegralInRange<int32_t>(
+                                                AUDIO_SESSION_DEVICE, AUDIO_SESSION_OUTPUT_MIX),
+                                        attributionSource);
 
-    int32_t mediaPlayerServiceDumpFd = memfd_create("OutputDumpFile", MFD_ALLOW_SEALING);
-    Vector<String16> args;
-    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
-    mediaPlayerService->dump(mediaPlayerServiceDumpFd, args);
-    close(mediaPlayerServiceDumpFd);
+    int32_t mediaPlayerServiceDumpFd = memfd_create(dumpFile, MFD_ALLOW_SEALING);
+    Vector<String16> args;
+    args.push_back(String16(mFdp.ConsumeRandomLengthString().c_str()));
+    mediaPlayerService->dump(mediaPlayerServiceDumpFd, args);
+    close(mediaPlayerServiceDumpFd);
 
-    if (!mMediaPlayer) {
+    if (!mMediaPlayer) {
         return;
-    }
-
-    if (setDataSource(data, size)) {
+    }
+    if (setDataSource(data, size)) {
         invokeMediaPlayer();
-    }
+    }
+}
+
+extern "C" int LLVMFuzzerInitialize(int* /* argc */, char*** /* argv */) {
+    MediaPlayerService::instantiate();
+    MediaExtractorService::instantiate();
+    return 0;
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
index f4143da..3987a67 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoder.cpp
@@ -43,6 +43,12 @@
 #include <mpeg2ts/ATSParser.h>
 #include <gui/Surface.h>
 
+#define ATRACE_TAG ATRACE_TAG_AUDIO
+#include <utils/Trace.h>
+
+#include <android-base/stringprintf.h>
+using ::android::base::StringPrintf;
+
 namespace android {
 
 static float kDisplayRefreshingRate = 60.f; // TODO: get this from the display
@@ -157,7 +163,10 @@
                     int32_t index;
                     CHECK(msg->findInt32("index", &index));
 
+                    ATRACE_BEGIN(StringPrintf("Nuplayer::handleAnInputBuffer [%s]",
+                                              mIsAudio ? "audio" : "video").c_str());
                     handleAnInputBuffer(index);
+                    ATRACE_END();
                     break;
                 }
 
@@ -175,7 +184,10 @@
                     CHECK(msg->findInt64("timeUs", &timeUs));
                     CHECK(msg->findInt32("flags", &flags));
 
+                    ATRACE_BEGIN(StringPrintf("Nuplayer::handleAnOutputBuffer [%s]",
+                                              mIsAudio ? "audio" : "video").c_str());
                     handleAnOutputBuffer(index, offset, size, timeUs, flags);
+                    ATRACE_END();
                     break;
                 }
 
@@ -184,7 +196,10 @@
                     sp<AMessage> format;
                     CHECK(msg->findMessage("format", &format));
 
+                    ATRACE_BEGIN(StringPrintf("Nuplayer::handleOutputFormatChange [%s]",
+                                              mIsAudio ? "audio" : "video").c_str());
                     handleOutputFormatChange(format);
+                    ATRACE_END();
                     break;
                 }
 
@@ -205,15 +220,16 @@
                     break;
                 }
             }
-
             break;
         }
 
         case kWhatRenderBuffer:
         {
+            ATRACE_BEGIN("Nuplayer::onRenderBuffer");
             if (!isStaleReply(msg)) {
                 onRenderBuffer(msg);
             }
+            ATRACE_END();
             break;
         }
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
index 3e96d27..0cb5062 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDecoderBase.cpp
@@ -27,6 +27,12 @@
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AMessage.h>
 
+#define ATRACE_TAG ATRACE_TAG_AUDIO
+#include <utils/Trace.h>
+
+#include <android-base/stringprintf.h>
+using ::android::base::StringPrintf;
+
 namespace android {
 
 NuPlayer::DecoderBase::DecoderBase(const sp<AMessage> &notify)
@@ -129,9 +135,11 @@
     switch (msg->what()) {
         case kWhatConfigure:
         {
+            ATRACE_BEGIN("NuPlayer::DecoderBase::onConfigure");
             sp<AMessage> format;
             CHECK(msg->findMessage("format", &format));
             onConfigure(format);
+            ATRACE_END();
             break;
         }
 
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index c6595ba..851d252 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -36,6 +36,11 @@
 #include <media/stagefright/Utils.h>
 #include <media/stagefright/FoundationUtils.h>
 
+#define ATRACE_TAG ATRACE_TAG_AUDIO
+#include <utils/Trace.h>
+#include <android-base/stringprintf.h>
+using ::android::base::StringPrintf;
+
 static const int kDumpLockRetries = 50;
 static const int kDumpLockSleepUs = 20000;
 
@@ -146,9 +151,11 @@
         const char *url,
         const KeyedVector<String8, String8> *headers) {
     ALOGV("setDataSource(%p) url(%s)", this, uriDebugString(url, false).c_str());
+    ATRACE_BEGIN(StringPrintf("setDataSource(%p)", this).c_str());
     Mutex::Autolock autoLock(mLock);
 
     if (mState != STATE_IDLE) {
+        ATRACE_END();
         return INVALID_OPERATION;
     }
 
@@ -159,15 +166,18 @@
     while (mState == STATE_SET_DATASOURCE_PENDING) {
         mCondition.wait(mLock);
     }
+    ATRACE_END();
 
     return mAsyncResult;
 }
 
 status_t NuPlayerDriver::setDataSource(int fd, int64_t offset, int64_t length) {
     ALOGV("setDataSource(%p) file(%d)", this, fd);
+    ATRACE_BEGIN(StringPrintf("setDataSource(%p) file(%d)", this, fd).c_str());
     Mutex::Autolock autoLock(mLock);
 
     if (mState != STATE_IDLE) {
+        ATRACE_END();
         return INVALID_OPERATION;
     }
 
@@ -178,15 +188,18 @@
     while (mState == STATE_SET_DATASOURCE_PENDING) {
         mCondition.wait(mLock);
     }
+    ATRACE_END();
 
     return mAsyncResult;
 }
 
 status_t NuPlayerDriver::setDataSource(const sp<IStreamSource> &source) {
     ALOGV("setDataSource(%p) stream source", this);
+    ATRACE_BEGIN(StringPrintf("setDataSource(%p) stream source", this).c_str());
     Mutex::Autolock autoLock(mLock);
 
     if (mState != STATE_IDLE) {
+        ATRACE_END();
         return INVALID_OPERATION;
     }
 
@@ -197,15 +210,18 @@
     while (mState == STATE_SET_DATASOURCE_PENDING) {
         mCondition.wait(mLock);
     }
+    ATRACE_END();
 
     return mAsyncResult;
 }
 
 status_t NuPlayerDriver::setDataSource(const sp<DataSource> &source) {
     ALOGV("setDataSource(%p) callback source", this);
+    ATRACE_BEGIN(StringPrintf("setDataSource(%p) callback source", this).c_str());
     Mutex::Autolock autoLock(mLock);
 
     if (mState != STATE_IDLE) {
+        ATRACE_END();
         return INVALID_OPERATION;
     }
 
@@ -216,15 +232,18 @@
     while (mState == STATE_SET_DATASOURCE_PENDING) {
         mCondition.wait(mLock);
     }
+    ATRACE_END();
 
     return mAsyncResult;
 }
 
 status_t NuPlayerDriver::setDataSource(const String8& rtpParams) {
     ALOGV("setDataSource(%p) rtp source", this);
+    ATRACE_BEGIN(StringPrintf("setDataSource(%p) rtp source", this).c_str());
     Mutex::Autolock autoLock(mLock);
 
     if (mState != STATE_IDLE) {
+        ATRACE_END();
         return INVALID_OPERATION;
     }
 
@@ -235,6 +254,7 @@
     while (mState == STATE_SET_DATASOURCE_PENDING) {
         mCondition.wait(mLock);
     }
+    ATRACE_END();
 
     return mAsyncResult;
 }
@@ -295,8 +315,11 @@
 
 status_t NuPlayerDriver::prepare() {
     ALOGV("prepare(%p)", this);
+    ATRACE_BEGIN(StringPrintf("prepare(%p)", this).c_str());
     Mutex::Autolock autoLock(mLock);
-    return prepare_l();
+    status_t ret = prepare_l();
+    ATRACE_END();
+    return ret;
 }
 
 status_t NuPlayerDriver::prepare_l() {
@@ -354,8 +377,11 @@
 
 status_t NuPlayerDriver::start() {
     ALOGV("start(%p), state is %d, eos is %d", this, mState, mAtEOS);
+    ATRACE_BEGIN(StringPrintf("start(%p), state is %d, eos is %d", this, mState, mAtEOS).c_str());
     Mutex::Autolock autoLock(mLock);
-    return start_l();
+    status_t ret = start_l();
+    ATRACE_END();
+    return ret;
 }
 
 status_t NuPlayerDriver::start_l() {
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 3d4e955..899d50e 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -15,6 +15,7 @@
  */
 
 //#define LOG_NDEBUG 0
+#define ATRACE_TAG ATRACE_TAG_AUDIO
 #define LOG_TAG "NuPlayerRenderer"
 #include <utils/Log.h>
 
@@ -37,6 +38,9 @@
 
 #include <inttypes.h>
 
+#include <android-base/stringprintf.h>
+using ::android::base::StringPrintf;
+
 namespace android {
 
 /*
@@ -102,6 +106,10 @@
     switch (pcmEncoding) {
     case kAudioEncodingPcmFloat:
         return AUDIO_FORMAT_PCM_FLOAT;
+    case kAudioEncodingPcm32bit:
+        return AUDIO_FORMAT_PCM_32_BIT;
+    case kAudioEncodingPcm24bitPacked:
+        return AUDIO_FORMAT_PCM_24_BIT_PACKED;
     case kAudioEncodingPcm16bit:
         return AUDIO_FORMAT_PCM_16_BIT;
     case kAudioEncodingPcm8bit:
@@ -1996,6 +2004,8 @@
         bool isStreaming) {
     ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
             offloadOnly, offloadingAudio());
+    ATRACE_BEGIN(StringPrintf("NuPlayer::Renderer::onOpenAudioSink: offloadOnly(%d) "
+            "offloadingAudio(%d)", offloadOnly, offloadingAudio()).c_str());
     bool audioSinkChanged = false;
 
     int32_t numChannels;
@@ -2025,7 +2035,12 @@
     if (offloadingAudio()) {
         AString mime;
         CHECK(format->findString("mime", &mime));
-        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        status_t err = OK;
+        if (audioFormat == AUDIO_FORMAT_PCM_16_BIT) {
+            // audioFormat is still the default, so the format message most likely carried no
+            // pcm-encoding key; fall back to deriving the audio format from the MIME type.
+            err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        }
 
         if (err != OK) {
             ALOGE("Couldn't map mime \"%s\" to a valid "
@@ -2062,6 +2077,7 @@
             if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
                 ALOGV("openAudioSink: no change in offload mode");
                 // no change from previous configuration, everything ok.
+                ATRACE_END();
                 return OK;
             }
             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
@@ -2131,6 +2147,7 @@
         if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
             ALOGV("openAudioSink: no change in pcm mode");
             // no change from previous configuration, everything ok.
+            ATRACE_END();
             return OK;
         }
 
@@ -2175,6 +2192,7 @@
             ALOGW("openAudioSink: non offloaded open failed status: %d", err);
             mAudioSink->close();
             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+            ATRACE_END();
             return err;
         }
         mCurrentPcmInfo = info;
@@ -2186,13 +2204,16 @@
         onAudioSinkChanged();
     }
     mAudioTornDown = false;
+    ATRACE_END();
     return OK;
 }
 
 void NuPlayer::Renderer::onCloseAudioSink() {
+    ATRACE_BEGIN("NuPlayer::Renderer::onCloseAudioSink");
     mAudioSink->close();
     mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
     mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
+    ATRACE_END();
 }
 
 void NuPlayer::Renderer::onChangeAudioFormat(
diff --git a/media/libnblog/Reader.cpp b/media/libnblog/Reader.cpp
index 71ebfd1..d5f16e8 100644
--- a/media/libnblog/Reader.cpp
+++ b/media/libnblog/Reader.cpp
@@ -93,7 +93,7 @@
     do {
         availToRead = mFifoReader->obtain(iovec, capacity, NULL /*timeout*/, &lostTemp);
         lost += lostTemp;
-    } while (availToRead < 0 || ++tries <= kMaxObtainTries);
+    } while (availToRead < 0 && ++tries <= kMaxObtainTries);
 
     if (availToRead <= 0) {
         ALOGW_IF(availToRead < 0, "NBLog Reader %s failed to catch up with Writer", mName.c_str());
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index 1593aa0..e06efac 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -7575,6 +7575,22 @@
             return true;
         }
 
+        // When ACodec receives an error event in LoadedToIdleState, it does not release the
+        // allocated buffers, which causes a gralloc buffer leak. Release these buffers first and
+        // then process the error event.
+        case OMX_EventError:
+        {
+            if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
+                mCodec->freeBuffersOnPort(kPortIndexInput);
+            }
+
+            if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
+                mCodec->freeBuffersOnPort(kPortIndexOutput);
+            }
+
+            return BaseState::onOMXEvent(event, data1, data2);
+        }
+
         default:
             return BaseState::onOMXEvent(event, data1, data2);
     }
diff --git a/media/libstagefright/ACodecBufferChannel.cpp b/media/libstagefright/ACodecBufferChannel.cpp
index ad42813..16e267b 100644
--- a/media/libstagefright/ACodecBufferChannel.cpp
+++ b/media/libstagefright/ACodecBufferChannel.cpp
@@ -50,7 +50,7 @@
 using namespace hardware::cas::native::V1_0;
 using DrmBufferType = hardware::drm::V1_0::BufferType;
 using BufferInfo = ACodecBufferChannel::BufferInfo;
-using BufferInfoIterator = std::vector<const BufferInfo>::const_iterator;
+using BufferInfoIterator = std::vector<BufferInfo>::const_iterator;
 
 ACodecBufferChannel::~ACodecBufferChannel() {
     if (mCrypto != nullptr && mDealer != nullptr && mHeapSeqNum >= 0) {
@@ -59,7 +59,7 @@
 }
 
 static BufferInfoIterator findClientBuffer(
-        const std::shared_ptr<const std::vector<const BufferInfo>> &array,
+        const std::shared_ptr<const std::vector<BufferInfo>> &array,
         const sp<MediaCodecBuffer> &buffer) {
     return std::find_if(
             array->begin(), array->end(),
@@ -67,7 +67,7 @@
 }
 
 static BufferInfoIterator findBufferId(
-        const std::shared_ptr<const std::vector<const BufferInfo>> &array,
+        const std::shared_ptr<const std::vector<BufferInfo>> &array,
         IOMX::buffer_id bufferId) {
     return std::find_if(
             array->begin(), array->end(),
@@ -97,7 +97,7 @@
 }
 
 status_t ACodecBufferChannel::queueInputBuffer(const sp<MediaCodecBuffer> &buffer) {
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
     if (it == array->end()) {
@@ -138,7 +138,7 @@
     if (!hasCryptoOrDescrambler() || mDealer == nullptr) {
         return -ENOSYS;
     }
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
     if (it == array->end()) {
@@ -352,7 +352,7 @@
         size_t numSubSamples,
         const sp<MediaCodecBuffer> &buffer,
         AString* errorDetailMsg) {
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
     if (it == array->end()) {
@@ -473,7 +473,7 @@
 
 status_t ACodecBufferChannel::renderOutputBuffer(
         const sp<MediaCodecBuffer> &buffer, int64_t timestampNs) {
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mOutputBuffers));
     BufferInfoIterator it = findClientBuffer(array, buffer);
     if (it == array->end()) {
@@ -495,7 +495,7 @@
 }
 
 status_t ACodecBufferChannel::discardBuffer(const sp<MediaCodecBuffer> &buffer) {
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     bool input = true;
     BufferInfoIterator it = findClientBuffer(array, buffer);
@@ -517,7 +517,7 @@
 }
 
 void ACodecBufferChannel::getInputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
-    std::shared_ptr<const std::vector<const BufferInfo>> inputBuffers(
+    std::shared_ptr<const std::vector<BufferInfo>> inputBuffers(
             std::atomic_load(&mInputBuffers));
     array->clear();
     for (const BufferInfo &elem : *inputBuffers) {
@@ -526,7 +526,7 @@
 }
 
 void ACodecBufferChannel::getOutputBufferArray(Vector<sp<MediaCodecBuffer>> *array) {
-    std::shared_ptr<const std::vector<const BufferInfo>> outputBuffers(
+    std::shared_ptr<const std::vector<BufferInfo>> outputBuffers(
             std::atomic_load(&mOutputBuffers));
     array->clear();
     for (const BufferInfo &elem : *outputBuffers) {
@@ -583,7 +583,7 @@
             mDecryptDestination = mDealer->allocate(destinationBufferSize);
         }
     }
-    std::vector<const BufferInfo> inputBuffers;
+    std::vector<BufferInfo> inputBuffers;
     for (const BufferAndId &elem : array) {
         sp<IMemory> sharedEncryptedBuffer;
         if (hasCryptoOrDescrambler()) {
@@ -593,22 +593,22 @@
     }
     std::atomic_store(
             &mInputBuffers,
-            std::make_shared<const std::vector<const BufferInfo>>(inputBuffers));
+            std::make_shared<const std::vector<BufferInfo>>(inputBuffers));
 }
 
 void ACodecBufferChannel::setOutputBufferArray(const std::vector<BufferAndId> &array) {
-    std::vector<const BufferInfo> outputBuffers;
+    std::vector<BufferInfo> outputBuffers;
     for (const BufferAndId &elem : array) {
         outputBuffers.emplace_back(elem.mBuffer, elem.mBufferId, nullptr);
     }
     std::atomic_store(
             &mOutputBuffers,
-            std::make_shared<const std::vector<const BufferInfo>>(outputBuffers));
+            std::make_shared<const std::vector<BufferInfo>>(outputBuffers));
 }
 
 void ACodecBufferChannel::fillThisBuffer(IOMX::buffer_id bufferId) {
     ALOGV("fillThisBuffer #%d", bufferId);
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mInputBuffers));
     BufferInfoIterator it = findBufferId(array, bufferId);
 
@@ -629,7 +629,7 @@
         IOMX::buffer_id bufferId,
         OMX_U32 omxFlags) {
     ALOGV("drainThisBuffer #%d", bufferId);
-    std::shared_ptr<const std::vector<const BufferInfo>> array(
+    std::shared_ptr<const std::vector<BufferInfo>> array(
             std::atomic_load(&mOutputBuffers));
     BufferInfoIterator it = findBufferId(array, bufferId);
 
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 886285e..d084f10 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -315,14 +315,16 @@
         "libaudioclient_aidl_conversion",
         "packagemanager_aidl-cpp",
         "server_configurable_flags",
+        "libaconfig_storage_read_api_cc",
         "aconfig_mediacodec_flags_c_lib",
     ],
 
     static_libs: [
         "android.media.codec-aconfig-cc",
+        "com.android.media.flags.editing-aconfig-cc",
         "libstagefright_esds",
         "libstagefright_color_conversion",
-        "libyuv_static",
+        "libyuv",
         "libstagefright_webm",
         "libstagefright_timedtext",
         "libogg",
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index 4441121..e26f189 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -33,6 +33,7 @@
 #include <camera/Camera.h>
 #include <camera/CameraParameters.h>
 #include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
 #include <utils/String8.h>
 #include <cutils/properties.h>
@@ -150,9 +151,15 @@
     int32_t cameraId, const std::string& clientName, uid_t clientUid, pid_t clientPid) {
 
     if (camera == 0) {
-        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                /*forceSlowJpegMode*/false);
+        AttributionSourceState clientAttribution;
+        clientAttribution.pid = clientPid;
+        clientAttribution.uid = clientUid;
+        clientAttribution.deviceId = kDefaultDeviceId;
+        clientAttribution.packageName = clientName;
+
+        mCamera = Camera::connect(cameraId, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                /*forceSlowJpegMode*/false, clientAttribution);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
     } else {
@@ -465,11 +472,13 @@
         ALOGE("%s: Buffer queue already exists", __FUNCTION__);
         return ALREADY_EXISTS;
     }
-
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     // Create a buffer queue.
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
+#endif // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
 
     uint32_t usage = GRALLOC_USAGE_SW_READ_OFTEN;
     if (format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
@@ -478,9 +487,15 @@
 
     bufferCount += kConsumerBufferCount;
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
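+    // With WB_CONSUMER_BASE_OWNS_BQ the consumer manages its own BufferQueue, so the producer is
+    // obtained from the consumer's Surface instead of an explicitly created BufferQueue.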
+    mVideoBufferConsumer = new BufferItemConsumer(usage, bufferCount);
+    mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
+    mVideoBufferProducer = mVideoBufferConsumer->getSurface()->getIGraphicBufferProducer();
+#else
     mVideoBufferConsumer = new BufferItemConsumer(consumer, usage, bufferCount);
     mVideoBufferConsumer->setName(String8::format("StageFright-CameraSource"));
     mVideoBufferProducer = producer;
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     status_t res = mVideoBufferConsumer->setDefaultBufferSize(width, height);
     if (res != OK) {
diff --git a/media/libstagefright/FrameDecoder.cpp b/media/libstagefright/FrameDecoder.cpp
index 46703bb..cc78510 100644
--- a/media/libstagefright/FrameDecoder.cpp
+++ b/media/libstagefright/FrameDecoder.cpp
@@ -18,19 +18,14 @@
 #define LOG_TAG "FrameDecoder"
 #define ATRACE_TAG  ATRACE_TAG_VIDEO
 #include "include/FrameDecoder.h"
-#include "include/FrameCaptureLayer.h"
-#include "include/HevcUtils.h"
+#include <android_media_codec.h>
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
 #include <gui/Surface.h>
 #include <inttypes.h>
-#include <mediadrm/ICrypto.h>
 #include <media/IMediaSource.h>
 #include <media/MediaCodecBuffer.h>
-#include <media/stagefright/foundation/avc_utils.h>
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/foundation/AMessage.h>
-#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/CodecBase.h>
 #include <media/stagefright/ColorConverter.h>
 #include <media/stagefright/FrameCaptureProcessor.h>
 #include <media/stagefright/MediaBuffer.h>
@@ -39,13 +34,24 @@
 #include <media/stagefright/MediaDefs.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/Utils.h>
+#include <media/stagefright/foundation/ADebug.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/ColorUtils.h>
+#include <media/stagefright/foundation/avc_utils.h>
+#include <mediadrm/ICrypto.h>
 #include <private/media/VideoFrame.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
+#include "include/FrameCaptureLayer.h"
+#include "include/HevcUtils.h"
+
+#include <C2Buffer.h>
+#include <Codec2BufferUtils.h>
 
 namespace android {
 
 static const int64_t kBufferTimeOutUs = 10000LL; // 10 msec
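+// Longer timeout used by the asynchronous (block model) path, where input and output buffers are
+// delivered through MediaCodec callbacks rather than polled.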
+static const int64_t kAsyncBufferTimeOutUs = 2000000LL; // 2000 msec
 static const size_t kRetryCount = 100; // must be >0
 static const int64_t kDefaultSampleDurationUs = 33333LL; // 33ms
 // For codec, 0 is the highest importance; higher the number lesser important.
@@ -232,6 +238,104 @@
     return false;
 }
 
+AsyncCodecHandler::AsyncCodecHandler(const wp<FrameDecoder>& frameDecoder) {
+    mFrameDecoder = frameDecoder;
+}
+
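+// Dispatches MediaCodec asynchronous callbacks (input/output buffers, format changes, errors)
+// to the owning FrameDecoder.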
+void AsyncCodecHandler::onMessageReceived(const sp<AMessage>& msg) {
+    switch (msg->what()) {
+        case FrameDecoder::kWhatCallbackNotify:
+            int32_t callbackId;
+            if (!msg->findInt32("callbackID", &callbackId)) {
+                ALOGE("kWhatCallbackNotify: callbackID is expected.");
+                break;
+            }
+            switch (callbackId) {
+                case MediaCodec::CB_INPUT_AVAILABLE: {
+                    int32_t index;
+                    if (!msg->findInt32("index", &index)) {
+                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
+                        break;
+                    }
+                    ALOGD("CB_INPUT_AVAILABLE received, index is %d", index);
+                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+                    if (frameDecoder != nullptr) {
+                        frameDecoder->handleInputBufferAsync(index);
+                    }
+                    break;
+                }
+                case MediaCodec::CB_OUTPUT_AVAILABLE: {
+                    int32_t index;
+                    int64_t timeUs;
+                    CHECK(msg->findInt32("index", &index));
+                    CHECK(msg->findInt64("timeUs", &timeUs));
+                    ALOGD("CB_OUTPUT_AVAILABLE received, index is %d", index);
+                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+                    if (frameDecoder != nullptr) {
+                        frameDecoder->handleOutputBufferAsync(index, timeUs);
+                    }
+                    break;
+                }
+                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
+                    ALOGD("CB_OUTPUT_FORMAT_CHANGED received");
+                    sp<AMessage> format;
+                    if (!msg->findMessage("format", &format) || format == nullptr) {
+                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
+                        break;
+                    }
+                    sp<FrameDecoder> frameDecoder = mFrameDecoder.promote();
+                    if (frameDecoder != nullptr) {
+                        frameDecoder->handleOutputFormatChangeAsync(format);
+                    }
+                    break;
+                }
+                case MediaCodec::CB_ERROR: {
+                    status_t err;
+                    int32_t actionCode;
+                    AString detail;
+                    if (!msg->findInt32("err", &err)) {
+                        ALOGE("CB_ERROR: err is expected.");
+                        break;
+                    }
+                    if (!msg->findInt32("actionCode", &actionCode)) {
+                        ALOGE("CB_ERROR: actionCode is expected.");
+                        break;
+                    }
+                    msg->findString("detail", &detail);
+                    ALOGE("Codec reported error(0x%x/%s), actionCode(%d), detail(%s)", err,
+                          StrMediaError(err).c_str(), actionCode, detail.c_str());
+                    break;
+                }
+                default:
+                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", callbackId);
+                    break;
+            }
+            break;
+        default:
+            ALOGE("unexpected message received: %s", msg->debugString().c_str());
+            break;
+    }
+}
+
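+// Thread-safe FIFO of input buffer indices reported by CB_INPUT_AVAILABLE callbacks;
+// dequeue() blocks until an index is available or the timeout expires.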
+void InputBufferIndexQueue::enqueue(int32_t index) {
+    std::scoped_lock<std::mutex> lock(mMutex);
+    mQueue.push(index);
+    mCondition.notify_one();
+}
+
+bool InputBufferIndexQueue::dequeue(int32_t* index, int32_t timeOutUs) {
+    std::unique_lock<std::mutex> lock(mMutex);
+    bool hasAvailableIndex = mCondition.wait_for(lock, std::chrono::microseconds(timeOutUs),
+                                                 [this] { return !mQueue.empty(); });
+    if (hasAvailableIndex) {
+        *index = mQueue.front();
+        mQueue.pop();
+        return true;
+    } else {
+        return false;
+    }
+}
+
 //static
 sp<IMemory> FrameDecoder::getMetadataOnly(
         const sp<MetaData> &trackMeta, int colorFormat, bool thumbnail, uint32_t bitDepth) {
@@ -281,6 +385,7 @@
         const sp<MetaData> &trackMeta,
         const sp<IMediaSource> &source)
     : mComponentName(componentName),
+      mUseBlockModel(false),
       mTrackMeta(trackMeta),
       mSource(source),
       mDstFormat(OMX_COLOR_Format16bitRGB565),
@@ -290,6 +395,10 @@
 }
 
 FrameDecoder::~FrameDecoder() {
+    if (mHandler != NULL) {
+        mAsyncLooper->stop();
+        mAsyncLooper->unregisterHandler(mHandler->id());
+    }
     if (mDecoder != NULL) {
         mDecoder->release();
         mSource->stop();
@@ -333,8 +442,18 @@
         return (decoder.get() == NULL) ? NO_MEMORY : err;
     }
 
+    if (mUseBlockModel) {
+        mAsyncLooper = new ALooper;
+        mAsyncLooper->start();
+        mHandler = new AsyncCodecHandler(wp<FrameDecoder>(this));
+        mAsyncLooper->registerHandler(mHandler);
+        sp<AMessage> callbackMsg = new AMessage(kWhatCallbackNotify, mHandler);
+        decoder->setCallback(callbackMsg);
+    }
+
     err = decoder->configure(
-            videoFormat, mSurface, NULL /* crypto */, 0 /* flags */);
+            videoFormat, mSurface, NULL /* crypto */,
+            mUseBlockModel ? MediaCodec::CONFIGURE_FLAG_USE_BLOCK_MODEL : 0 /* flags */);
     if (err != OK) {
         ALOGW("configure returned error %d (%s)", err, asString(err));
         decoder->release();
@@ -362,10 +481,18 @@
 sp<IMemory> FrameDecoder::extractFrame(FrameRect *rect) {
     ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ExtractFrame");
     status_t err = onExtractRect(rect);
-    if (err == OK) {
+    if (err != OK) {
+        ALOGE("onExtractRect error %d", err);
+        return NULL;
+    }
+
+    if (!mUseBlockModel) {
         err = extractInternal();
+    } else {
+        err = extractInternalUsingBlockModel();
     }
     if (err != OK) {
+        ALOGE("extractInternal error %d", err);
         return NULL;
     }
 
@@ -380,6 +507,7 @@
         ALOGE("decoder is not initialized");
         return NO_INIT;
     }
+
     do {
         size_t index;
         int64_t ptsUs = 0LL;
@@ -433,7 +561,8 @@
                         (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
                         mediaBuffer->range_length());
 
-                onInputReceived(codecBuffer, mediaBuffer->meta_data(), mFirstSample, &flags);
+                onInputReceived(codecBuffer->data(), codecBuffer->size(), mediaBuffer->meta_data(),
+                                mFirstSample, &flags);
                 mFirstSample = false;
             }
 
@@ -487,11 +616,14 @@
                         ALOGE("failed to get output buffer %zu", index);
                         break;
                     }
+                    uint8_t* frameData = videoFrameBuffer->data();
+                    sp<ABuffer> imageData;
+                    videoFrameBuffer->meta()->findBuffer("image-data", &imageData);
                     if (mSurface != nullptr) {
                         mDecoder->renderOutputBufferAndRelease(index);
-                        err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+                        err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
                     } else {
-                        err = onOutputReceived(videoFrameBuffer, mOutputFormat, ptsUs, &done);
+                        err = onOutputReceived(frameData, imageData, mOutputFormat, ptsUs, &done);
                         mDecoder->releaseOutputBuffer(index);
                     }
                 } else {
@@ -510,6 +642,75 @@
     return err;
 }
 
+status_t FrameDecoder::extractInternalUsingBlockModel() {
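+    // Asynchronous block-model path: copy one access unit into a C2LinearBlock, queue it with the
+    // EOS flag set, then wait for handleOutputBufferAsync() to deliver a frame or time out.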
+    status_t err = OK;
+    MediaBufferBase* mediaBuffer = NULL;
+    int64_t ptsUs = 0LL;
+    uint32_t flags = 0;
+    int32_t index;
+    mHandleOutputBufferAsyncDone = false;
+
+    err = mSource->read(&mediaBuffer, &mReadOptions);
+    mReadOptions.clearSeekTo();
+    if (err != OK) {
+        ALOGW("Input Error: err=%d", err);
+        if (mediaBuffer) {
+            mediaBuffer->release();
+        }
+        return err;
+    }
+
+    size_t inputSize = mediaBuffer->range_length();
+    std::shared_ptr<C2LinearBlock> block =
+            MediaCodec::FetchLinearBlock(inputSize, {std::string{mComponentName.c_str()}});
+    C2WriteView view{block->map().get()};
+    if (view.error() != C2_OK) {
+        ALOGE("Fatal error: failed to allocate and map a block");
+        mediaBuffer->release();
+        return NO_MEMORY;
+    }
+    if (inputSize > view.capacity()) {
+        ALOGE("Fatal error: allocated block is too small "
+              "(input size %zu; block cap %u)",
+              inputSize, view.capacity());
+        mediaBuffer->release();
+        return BAD_VALUE;
+    }
+    CHECK(mediaBuffer->meta_data().findInt64(kKeyTime, &ptsUs));
+    memcpy(view.base(), (const uint8_t*)mediaBuffer->data() + mediaBuffer->range_offset(),
+           inputSize);
+    std::shared_ptr<C2Buffer> c2Buffer =
+            C2Buffer::CreateLinearBuffer(block->share(0, inputSize, C2Fence{}));
+    onInputReceived(view.base(), inputSize, mediaBuffer->meta_data(), true /* firstSample */,
+                    &flags);
+    flags |= MediaCodec::BUFFER_FLAG_EOS;
+    mediaBuffer->release();
+
+    std::vector<AccessUnitInfo> infoVec;
+    infoVec.emplace_back(flags, inputSize, ptsUs);
+    sp<BufferInfosWrapper> infos = new BufferInfosWrapper{std::move(infoVec)};
+
+    if (!mInputBufferIndexQueue.dequeue(&index, kAsyncBufferTimeOutUs)) {
+        ALOGE("No available input buffer index for async mode.");
+        return TIMED_OUT;
+    }
+
+    AString errorDetailMsg;
+    ALOGD("QueueLinearBlock: index=%d size=%zu ts=%" PRId64 " us flags=%x",
+            index, inputSize, ptsUs, flags);
+    err = mDecoder->queueBuffer(index, c2Buffer, infos, nullptr, &errorDetailMsg);
+    if (err != OK) {
+        ALOGE("failed to queueBuffer (err %d): %s", err, errorDetailMsg.c_str());
+        return err;
+    }
+
+    // wait for handleOutputBufferAsync() to finish
+    std::unique_lock _lk(mMutex);
+    mOutputFramePending.wait_for(_lk, std::chrono::microseconds(kAsyncBufferTimeOutUs),
+                                 [this] { return mHandleOutputBufferAsyncDone; });
+    return mHandleOutputBufferAsyncDone ? OK : TIMED_OUT;
+}
+
 //////////////////////////////////////////////////////////////////////
 
 VideoFrameDecoder::VideoFrameDecoder(
@@ -525,6 +726,81 @@
       mDefaultSampleDurationUs(0) {
 }
 
+status_t FrameDecoder::handleOutputFormatChangeAsync(sp<AMessage> format) {
+    // Here format is MediaCodec's internal copy of output format.
+    // Make a copy since the client might modify it.
+    mOutputFormat = format->dup();
+    ALOGD("receive output format in async mode: %s", mOutputFormat->debugString().c_str());
+    return OK;
+}
+
+status_t FrameDecoder::handleInputBufferAsync(int32_t index) {
+    mInputBufferIndexQueue.enqueue(index);
+    return OK;
+}
+
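+// Runs on the async callback thread: converts or renders the first decoded output buffer and
+// signals the waiter in extractInternalUsingBlockModel().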
+status_t FrameDecoder::handleOutputBufferAsync(int32_t index, int64_t timeUs) {
+    if (mHandleOutputBufferAsyncDone) {
+        // we have already processed an output buffer, skip others
+        return OK;
+    }
+
+    status_t err = OK;
+    sp<MediaCodecBuffer> videoFrameBuffer;
+    err = mDecoder->getOutputBuffer(index, &videoFrameBuffer);
+    if (err != OK || videoFrameBuffer == nullptr) {
+        ALOGE("failed to get output buffer %d", index);
+        return err;
+    }
+
+    bool onOutputReceivedDone = false;
+    if (mSurface != nullptr) {
+        mDecoder->renderOutputBufferAndRelease(index);
+        // frameData and imgObj will be fetched by captureSurface() inside onOutputReceived(),
+        // so explicitly pass null here.
+        err = onOutputReceived(nullptr, nullptr, mOutputFormat, timeUs, &onOutputReceivedDone);
+    } else {
+        // get stride and frame data for block model buffer
+        std::shared_ptr<C2Buffer> c2buffer = videoFrameBuffer->asC2Buffer();
+        if (!c2buffer
+                || c2buffer->data().type() != C2BufferData::GRAPHIC
+                || c2buffer->data().graphicBlocks().size() == 0u) {
+            ALOGE("C2Buffer precond fail");
+            return ERROR_MALFORMED;
+        }
+
+        std::unique_ptr<const C2GraphicView> view(std::make_unique<const C2GraphicView>(
+            c2buffer->data().graphicBlocks()[0].map().get()));
+        GraphicView2MediaImageConverter converter(*view, mOutputFormat, false /* copy */);
+        if (converter.initCheck() != OK) {
+            ALOGE("Converter init failed: %d", converter.initCheck());
+            return NO_INIT;
+        }
+
+        uint8_t* frameData = converter.wrap()->data();
+        sp<ABuffer> imageData = converter.imageData();
+        if (imageData != nullptr) {
+            mOutputFormat->setBuffer("image-data", imageData);
+            MediaImage2 *img = (MediaImage2*) imageData->data();
+            if (img->mNumPlanes > 0 && img->mType != img->MEDIA_IMAGE_TYPE_UNKNOWN) {
+                int32_t stride = img->mPlane[0].mRowInc;
+                mOutputFormat->setInt32(KEY_STRIDE, stride);
+                ALOGD("updating stride = %d", stride);
+            }
+        }
+
+        err = onOutputReceived(frameData, imageData, mOutputFormat, timeUs, &onOutputReceivedDone);
+        mDecoder->releaseOutputBuffer(index);
+    }
+
+    if (err == OK && onOutputReceivedDone) {
+        std::lock_guard _lm(mMutex);
+        mHandleOutputBufferAsyncDone = true;
+        mOutputFramePending.notify_one();
+    }
+    return err;
+}
+
 sp<AMessage> VideoFrameDecoder::onGetFormatAndSeekOptions(
         int64_t frameTimeUs, int seekMode,
         MediaSource::ReadOptions *options,
@@ -575,8 +851,13 @@
     bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
             || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
     if (!isSeekingClosest) {
-        videoFormat->setInt32("android._num-input-buffers", 1);
-        videoFormat->setInt32("android._num-output-buffers", 1);
+        if (mComponentName.startsWithIgnoreCase("c2.")) {
+            mUseBlockModel = android::media::codec::provider_->thumbnail_block_model();
+        } else {
+            // OMX Codec
+            videoFormat->setInt32("android._num-input-buffers", 1);
+            videoFormat->setInt32("android._num-output-buffers", 1);
+        }
     }
 
     if (isHDR(videoFormat)) {
@@ -601,9 +882,8 @@
     return videoFormat;
 }
 
-status_t VideoFrameDecoder::onInputReceived(
-        const sp<MediaCodecBuffer> &codecBuffer,
-        MetaDataBase &sampleMeta, bool firstSample, uint32_t *flags) {
+status_t VideoFrameDecoder::onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+                                            bool firstSample, uint32_t* flags) {
     bool isSeekingClosest = (mSeekMode == MediaSource::ReadOptions::SEEK_CLOSEST)
             || (mSeekMode == MediaSource::ReadOptions::SEEK_FRAME_INDEX);
 
@@ -612,10 +892,7 @@
         ALOGV("Seeking closest: targetTimeUs=%lld", (long long)mTargetTimeUs);
     }
 
-    if (!isSeekingClosest
-            && ((mIsAvc && IsIDR(codecBuffer->data(), codecBuffer->size()))
-            || (mIsHevc && IsIDR(
-            codecBuffer->data(), codecBuffer->size())))) {
+    if (!isSeekingClosest && ((mIsAvc && IsIDR(data, size)) || (mIsHevc && IsIDR(data, size)))) {
         // Only need to decode one IDR frame, unless we're seeking with CLOSEST
         // option, in which case we need to actually decode to targetTimeUs.
         *flags |= MediaCodec::BUFFER_FLAG_EOS;
@@ -630,7 +907,8 @@
 }
 
 status_t VideoFrameDecoder::onOutputReceived(
-        const sp<MediaCodecBuffer> &videoFrameBuffer,
+        uint8_t* frameData,
+        sp<ABuffer> imgObj,
         const sp<AMessage> &outputFormat,
         int64_t timeUs, bool *done) {
     int64_t durationUs = mDefaultSampleDurationUs;
@@ -703,7 +981,6 @@
         }
 
         mFrame = static_cast<VideoFrame*>(frameMem->unsecurePointer());
-
         setFrame(frameMem);
     }
 
@@ -712,7 +989,7 @@
     if (mCaptureLayer != nullptr) {
         return captureSurface();
     }
-    ColorConverter converter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
+    ColorConverter colorConverter((OMX_COLOR_FORMATTYPE)srcFormat, dstFormat());
 
     uint32_t standard, range, transfer;
     if (!outputFormat->findInt32("color-standard", (int32_t*)&standard)) {
@@ -724,22 +1001,25 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
-    sp<ABuffer> imgObj;
-    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+
+    if (imgObj != nullptr) {
         MediaImage2 *imageData = nullptr;
         imageData = (MediaImage2 *)(imgObj.get()->data());
         if (imageData != nullptr) {
-            converter.setSrcMediaImage2(*imageData);
+            colorConverter.setSrcMediaImage2(*imageData);
         }
     }
     if (srcFormat == COLOR_FormatYUV420Flexible && imgObj.get() == nullptr) {
         return ERROR_UNSUPPORTED;
     }
-    converter.setSrcColorSpace(standard, range, transfer);
-    if (converter.isValid()) {
+    colorConverter.setSrcColorSpace(standard, range, transfer);
+    if (colorConverter.isValid()) {
         ScopedTrace trace(ATRACE_TAG, "FrameDecoder::ColorConverter");
-        converter.convert(
-                (const uint8_t *)videoFrameBuffer->data(),
+        if (frameData == nullptr) {
+            ALOGD("frameData is null for ColorConverter");
+        }
+        colorConverter.convert(
+                (const uint8_t *)frameData,
                 width, height, stride,
                 crop_left, crop_top, crop_right, crop_bottom,
                 mFrame->getFlattenedData(),
@@ -955,7 +1235,8 @@
 }
 
 status_t MediaImageDecoder::onOutputReceived(
-        const sp<MediaCodecBuffer> &videoFrameBuffer,
+        uint8_t* frameData,
+        sp<ABuffer> imgObj,
         const sp<AMessage> &outputFormat, int64_t /*timeUs*/, bool *done) {
     if (outputFormat == NULL) {
         return ERROR_MALFORMED;
@@ -1008,8 +1289,8 @@
     if (!outputFormat->findInt32("color-transfer", (int32_t*)&transfer)) {
         transfer = 0;
     }
-    sp<ABuffer> imgObj;
-    if (videoFrameBuffer->meta()->findBuffer("image-data", &imgObj)) {
+
+    if (imgObj != nullptr) {
         MediaImage2 *imageData = nullptr;
         imageData = (MediaImage2 *)(imgObj.get()->data());
         if (imageData != nullptr) {
@@ -1058,7 +1339,7 @@
 
     if (converter.isValid()) {
         converter.convert(
-                (const uint8_t *)videoFrameBuffer->data(),
+                (const uint8_t *)frameData,
                 width, height, stride,
                 crop_left, crop_top, crop_right, crop_bottom,
                 mFrame->getFlattenedData(),
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index e918b5e..3aa0107 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -53,6 +53,8 @@
 #include <media/esds/ESDS.h>
 #include "include/HevcUtils.h"
 
+#include <com_android_media_editing_flags.h>
+
 #ifndef __predict_false
 #define __predict_false(exp) __builtin_expect((exp) != 0, 0)
 #endif
@@ -72,6 +74,9 @@
 static const int64_t kMaxMetadataSize = 0x4000000LL;   // 64MB max per-frame metadata size
 static const int64_t kMaxCttsOffsetTimeUs = 30 * 60 * 1000000LL;  // 30 minutes
 static const size_t kESDSScratchBufferSize = 10;  // kMaxAtomSize in Mpeg4Extractor 64MB
+// Allow up to 100 milliseconds, which is safely above the maximum delay observed in manual testing
+// between posting from setNextFd and handling it.
+static const int64_t kFdCondWaitTimeoutNs = 100000000;
 
 static const char kMetaKey_Version[]    = "com.android.version";
 static const char kMetaKey_Manufacturer[]      = "com.android.manufacturer";
@@ -1262,9 +1267,13 @@
         return OK;
     }
 
+    // Wait for the signal only if the new file is not available.
     if (mNextFd == -1) {
-        ALOGW("No FileDescriptor for next recording");
-        return INVALID_OPERATION;
+        status_t res = mFdCond.waitRelative(mLock, kFdCondWaitTimeoutNs);
+        if (res != OK) {
+            ALOGW("No FileDescriptor for next recording");
+            return INVALID_OPERATION;
+        }
     }
 
     mSwitchPending = true;
@@ -2433,6 +2442,7 @@
         return INVALID_OPERATION;
     }
     mNextFd = dup(fd);
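+    // Wake any thread blocked on mFdCond waiting for the next output file descriptor.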
+    mFdCond.signal();
     return OK;
 }
 
@@ -3768,6 +3778,12 @@
             if (mStszTableEntries->count() == 0) {
                 mFirstSampleTimeRealUs = systemTime() / 1000;
                 if (timestampUs < 0 && mFirstSampleStartOffsetUs == 0) {
+                    if (WARN_UNLESS(timestampUs != INT64_MIN, "for %s track", trackName)) {
+                        copy->release();
+                        mSource->stop();
+                        mIsMalformed = true;
+                        break;
+                    }
                     mFirstSampleStartOffsetUs = -timestampUs;
                     timestampUs = 0;
                 }
@@ -4886,8 +4902,15 @@
             int32_t mediaTime = (mFirstSampleStartOffsetUs * mTimeScale + 5E5) / 1E6;
             int32_t firstSampleOffsetTicks =
                     (mFirstSampleStartOffsetUs * mvhdTimeScale + 5E5) / 1E6;
-            // samples before 0 don't count in for duration, hence subtract firstSampleOffsetTicks.
-            addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTime, 1, 0);
+            if (tkhdDurationTicks >= firstSampleOffsetTicks) {
+                // samples before 0 don't count in for duration, hence subtract
+                // firstSampleOffsetTicks.
+                addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTime, 1, 0);
+            } else {
+                ALOGW("The track header duration %" PRId64
+                      " is smaller than the first sample offset %" PRId64,
+                      mTrackDurationUs, mFirstSampleStartOffsetUs);
+            }
         } else {
             // Track starting at zero.
             ALOGV("No edit list entry required for this track");
@@ -4923,6 +4946,8 @@
             // Track with start offset.
             ALOGV("Tracks starting > 0");
             int32_t editDurationTicks = 0;
+            int32_t trackStartOffsetBFramesUs = getMinCttsOffsetTimeUs() - kMaxCttsOffsetTimeUs;
+            ALOGV("trackStartOffsetBFramesUs:%" PRId32, trackStartOffsetBFramesUs);
             if (mMinCttsOffsetTicks == mMaxCttsOffsetTicks) {
                 // Video with no B frame or non-video track.
                 editDurationTicks =
@@ -4931,8 +4956,6 @@
                 ALOGV("editDuration:%" PRId64 "us", (trackStartOffsetUs + movieStartOffsetBFramesUs));
             } else {
                 // Track with B frame.
-                int32_t trackStartOffsetBFramesUs = getMinCttsOffsetTimeUs() - kMaxCttsOffsetTimeUs;
-                ALOGV("trackStartOffsetBFramesUs:%" PRId32, trackStartOffsetBFramesUs);
                 editDurationTicks =
                         ((trackStartOffsetUs + movieStartOffsetBFramesUs +
                           trackStartOffsetBFramesUs) * mvhdTimeScale + 5E5) / 1E6;
@@ -4946,7 +4969,15 @@
             } else if (editDurationTicks < 0) {
                 // Only video tracks with B Frames would hit this case.
                 ALOGV("Edit list entry to negate start offset by B frames in other tracks");
-                addOneElstTableEntry(tkhdDurationTicks, std::abs(editDurationTicks), 1, 0);
+                if (com::android::media::editing::flags::
+                        stagefrightrecorder_enable_b_frames()) {
+                    int32_t mediaTimeTicks =
+                            ((trackStartOffsetUs + movieStartOffsetBFramesUs +
+                              trackStartOffsetBFramesUs) * mTimeScale - 5E5) / 1E6;
+                    addOneElstTableEntry(tkhdDurationTicks, std::abs(mediaTimeTicks), 1, 0);
+                } else {
+                    addOneElstTableEntry(tkhdDurationTicks, std::abs(editDurationTicks), 1, 0);
+                }
             } else {
                 ALOGV("No edit list entry needed for this track");
             }
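
A note for context, since the MPEG4Writer hunks above change the file-switch path: instead of failing immediately when setNextFd() has not landed yet, switching now blocks on mFdCond for up to kFdCondWaitTimeoutNs before giving up. The following is a minimal standalone sketch of that timed-wait handshake, using std::mutex/std::condition_variable rather than the framework's Mutex/Condition; the names NextFdGate and waitForNextFd are hypothetical.

// Standalone sketch (not the AOSP code) of the timed wait used when switching
// output files: one thread supplies the next fd, the writer thread waits for
// it with a bounded timeout.
#include <chrono>
#include <condition_variable>
#include <mutex>
#include <optional>
#include <thread>

class NextFdGate {
public:
    // Producer side (cf. MPEG4Writer::setNextFd): publish the fd and wake the waiter.
    void setNextFd(int fd) {
        std::lock_guard<std::mutex> lock(mLock);
        mNextFd = fd;
        mCond.notify_one();
    }

    // Writer side: wait up to 100 ms for the next fd; an empty result maps to
    // the INVALID_OPERATION path in the diff above.
    std::optional<int> waitForNextFd() {
        static constexpr auto kFdWaitTimeout = std::chrono::milliseconds(100);
        std::unique_lock<std::mutex> lock(mLock);
        if (!mCond.wait_for(lock, kFdWaitTimeout, [this] { return mNextFd.has_value(); })) {
            return std::nullopt;  // timed out: no fd was supplied
        }
        std::optional<int> fd = mNextFd;
        mNextFd.reset();
        return fd;
    }

private:
    std::mutex mLock;
    std::condition_variable mCond;
    std::optional<int> mNextFd;
};

int main() {
    NextFdGate gate;
    std::thread producer([&] { gate.setNextFd(42); });
    std::optional<int> fd = gate.waitForNextFd();  // returns 42 here
    producer.join();
    return fd.has_value() ? 0 : 1;
}
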
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 0401e82..9abe037 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -487,7 +487,7 @@
                                     .id = getId(mClient),
                                     .name = mCodecName,
                                     .importance = mImportance};
-        return std::move(clientInfo);
+        return clientInfo;
     }
 
 private:
@@ -840,7 +840,7 @@
     const sp<AMessage> mNotify;
 };
 
-class OnBufferReleasedListener : public ::android::BnProducerListener{
+class OnBufferReleasedListener : public ::android::SurfaceListener{
 private:
     uint32_t mGeneration;
     std::weak_ptr<BufferChannelBase> mBufferChannel;
@@ -852,6 +852,13 @@
         }
     }
 
+    void notifyBufferAttached() {
+        auto p = mBufferChannel.lock();
+        if (p) {
+            p->onBufferAttachedToOutputSurface(mGeneration);
+        }
+    }
+
 public:
     explicit OnBufferReleasedListener(
             uint32_t generation,
@@ -864,11 +871,22 @@
         notifyBufferReleased();
     }
 
+    void onBuffersDiscarded([[maybe_unused]] const std::vector<sp<GraphicBuffer>>& buffers)
+        override { }
+
     void onBufferDetached([[maybe_unused]] int slot) override {
         notifyBufferReleased();
     }
 
     bool needsReleaseNotify() override { return true; }
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(BQ_CONSUMER_ATTACH_CALLBACK)
+    void onBufferAttached() override {
+        notifyBufferAttached();
+    }
+
+    bool needsAttachNotify() override { return true; }
+#endif
 };
 
 class BufferCallback : public CodecBase::BufferCallback {
@@ -2000,6 +2018,7 @@
     int32_t flags;
     CHECK(buffer->meta()->findInt32("flags", &flags));
     if (flags & BUFFER_FLAG_DECODE_ONLY) {
+        ALOGV("discardDecodeOnlyOutputBuffer: mPortBuffers[out][%zu] NOT owned by client", index);
         info->mOwnedByClient = false;
         info->mData.clear();
         mBufferChannel->discardBuffer(buffer);
@@ -2444,7 +2463,12 @@
             mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
         }
     } else if (mFlags & kFlagIsSecure) {
-        ALOGW("Crypto or descrambler should be given for secure codec");
+        if (android::media::codec::provider_->secure_codecs_require_crypto()) {
+            mErrorLog.log(LOG_TAG, "Crypto or descrambler must be given for secure codec");
+            return INVALID_OPERATION;
+        } else {
+            ALOGW("Crypto or descrambler should be given for secure codec");
+        }
     }
 
     if (mConfigureMsg != nullptr) {
@@ -4474,9 +4498,16 @@
                 {
                     /* size_t index = */updateBuffers(kPortIndexInput, msg);
 
-                    if (mState == FLUSHING
-                            || mState == STOPPING
-                            || mState == RELEASING) {
+                    bool inStateToReturnBuffers =
+                        mState == FLUSHING || mState == STOPPING || mState == RELEASING;
+                    if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
+                        // Late callbacks from the codec could arrive here
+                        // after the codec is already stopped or released.
+                        inStateToReturnBuffers = mState == FLUSHING ||
+                                                 mState == STOPPING || mState == INITIALIZED ||
+                                                 mState == RELEASING || mState == UNINITIALIZED;
+                    }
+                    if (inStateToReturnBuffers) {
                         returnBuffersToCodecOnPort(kPortIndexInput);
                         break;
                     }
@@ -4555,9 +4586,16 @@
 
                     /* size_t index = */updateBuffers(kPortIndexOutput, msg);
 
-                    if (mState == FLUSHING
-                            || mState == STOPPING
-                            || mState == RELEASING) {
+                    bool inStateToReturnBuffers =
+                        mState == FLUSHING || mState == STOPPING || mState == RELEASING;
+                    if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
+                        // Late callbacks from the codec could arrive here
+                        // after the codec is already stopped or released.
+                        inStateToReturnBuffers = mState == FLUSHING ||
+                                                 mState == STOPPING || mState == INITIALIZED ||
+                                                 mState == RELEASING || mState == UNINITIALIZED;
+                    }
+                    if (inStateToReturnBuffers) {
                         returnBuffersToCodecOnPort(kPortIndexOutput);
                         break;
                     }
@@ -5923,7 +5961,7 @@
     }
 
     updateHdrMetrics(false /* isConfig */);
- }
+}
 
 void MediaCodec::extractCSD(const sp<AMessage> &format) {
     mCSD.clear();
@@ -6002,7 +6040,6 @@
             return -EINVAL;
         }
         if (codecInputData->data() == NULL) {
-            ALOGV("Input buffer %zu is not properly allocated", bufferIndex);
             mErrorLog.log(LOG_TAG, base::StringPrintf(
                     "Fatal error: input buffer %zu is not properly allocated", bufferIndex));
             return -EINVAL;
@@ -6048,6 +6085,10 @@
 
         mInputFormat.clear();
         mOutputFormat.clear();
+        if (android::media::codec::provider_->codec_buffer_state_cleanup()) {
+            mCSD.clear();
+            mLeftover.clear();
+        }
         mFlags &= ~kFlagOutputFormatChanged;
         mFlags &= ~kFlagOutputBuffersChanged;
         mFlags &= ~kFlagStickyError;
@@ -6106,6 +6147,8 @@
                 ALOGD("port %d buffer %zu still owned by client when codec is reclaimed",
                         portIndex, i);
             } else {
+                ALOGV("returnBuffersToCodecOnPort: mPortBuffers[%s][%zu] NOT owned by client",
+                      portIndex == kPortIndexInput ? "in" : "out", i);
                 info->mOwnedByClient = false;
                 info->mData.clear();
             }
@@ -6458,6 +6501,7 @@
 
         // synchronization boundary for getBufferAndFormat
         Mutex::Autolock al(mBufferLock);
+        ALOGV("onQueueInputBuffer: mPortBuffers[in][%zu] NOT owned by client", index);
         info->mOwnedByClient = false;
         info->mData.clear();
 
@@ -6474,6 +6518,7 @@
     sp<AMessage> msg = mLeftover.front();
     mLeftover.pop_front();
     msg->setSize("index", index);
+    ALOGV("handleLeftover(%zu)", index);
     return onQueueInputBuffer(msg);
 }
 
@@ -6542,6 +6587,7 @@
     sp<MediaCodecBuffer> buffer;
     {
         Mutex::Autolock al(mBufferLock);
+        ALOGV("onReleaseOutputBuffer: mPortBuffers[out][%zu] NOT owned by client", index);
         info->mOwnedByClient = false;
         buffer = info->mData;
         info->mData.clear();
@@ -6654,6 +6700,8 @@
 
     {
         Mutex::Autolock al(mBufferLock);
+        ALOGV("dequeuePortBuffer: mPortBuffers[%s][%zu] checking if not owned by client",
+              portIndex == kPortIndexInput ? "in" : "out", index);
         CHECK(!info->mOwnedByClient);
         info->mOwnedByClient = true;
 
@@ -6681,8 +6729,8 @@
     if (!mDetachedSurface) {
         uint64_t usage = 0;
         if (!mSurface || mSurface->getConsumerUsage(&usage) != OK) {
-            // TODO: should we use a/the default consumer usage?
-            usage = 0;
+            // By default, prepare the buffer to be displayed on any of the common surfaces.
+            usage = (GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_COMPOSER);
         }
         mDetachedSurface.reset(new ReleaseSurface(usage));
     }
@@ -6722,7 +6770,7 @@
             // to this surface after disconnect/connect, and those free frames would inherit the new
             // generation number. Disconnecting after setting a unique generation prevents this.
             nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
-            sp<IProducerListener> listener =
+            sp<SurfaceListener> listener =
                     new OnBufferReleasedListener(*generation, mBufferChannel);
             err = surfaceConnectWithListener(
                     surface, listener, "connectToSurface(reconnect-with-listener)");
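
A note for context on the MediaCodec hunks above: the listener registered with the output surface now derives from android::SurfaceListener and forwards attach events as well as release events, and it keeps only a weak reference to the buffer channel so a callback that arrives after release becomes a no-op. The sketch below shows that pattern in a framework-free form; BufferChannel and SurfaceEvents are hypothetical stand-ins, not the real libgui interface.

// Minimal sketch of the listener pattern: weak ownership keeps late surface
// callbacks from touching a buffer channel that has already been destroyed.
#include <cstdint>
#include <iostream>
#include <memory>

struct BufferChannel {
    void onOutputBufferReleased(uint32_t generation) {
        std::cout << "buffer released, generation " << generation << "\n";
    }
    void onOutputBufferAttached(uint32_t generation) {
        std::cout << "buffer attached, generation " << generation << "\n";
    }
};

class SurfaceEvents {
public:
    SurfaceEvents(uint32_t generation, std::weak_ptr<BufferChannel> channel)
          : mGeneration(generation), mChannel(std::move(channel)) {}

    // Both paths silently drop the event once the channel is gone.
    void onBufferReleased() {
        if (auto c = mChannel.lock()) c->onOutputBufferReleased(mGeneration);
    }
    void onBufferAttached() {
        if (auto c = mChannel.lock()) c->onOutputBufferAttached(mGeneration);
    }

private:
    const uint32_t mGeneration;            // identifies the surface connection that fired
    std::weak_ptr<BufferChannel> mChannel; // weak: the listener must not extend lifetime
};

int main() {
    auto channel = std::make_shared<BufferChannel>();
    SurfaceEvents listener(1 /* generation */, channel);
    listener.onBufferReleased();  // forwarded
    channel.reset();
    listener.onBufferAttached();  // dropped: channel already destroyed
}
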
diff --git a/media/libstagefright/MediaMuxer.cpp b/media/libstagefright/MediaMuxer.cpp
index aaf7465..1008445 100644
--- a/media/libstagefright/MediaMuxer.cpp
+++ b/media/libstagefright/MediaMuxer.cpp
@@ -249,6 +249,11 @@
         sampleMetaData.setInt32(kKeyIsMuxerData, 1);
     }
 
+    if (flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) {
+        sampleMetaData.setInt32(kKeyIsCodecConfig, true);
+        ALOGV("BUFFER_FLAG_CODEC_CONFIG");
+    }
+
     if (flags & MediaCodec::BUFFER_FLAG_EOS) {
         sampleMetaData.setInt32(kKeyIsEndOfStream, 1);
         ALOGV("BUFFER_FLAG_EOS");
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 604dcb0..74432a6 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -111,8 +111,9 @@
         }
     }
 
-    int finalUsage = usage | consumerUsage;
-    ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = %#x", usage, consumerUsage, finalUsage);
+    uint64_t finalUsage = (uint32_t) usage | (uint32_t) consumerUsage;
+    ALOGV("gralloc usage: %#x(producer) + %#x(consumer) = 0x%" PRIx64,
+            usage, consumerUsage, finalUsage);
     err = native_window_set_usage(nativeWindow, finalUsage);
     if (err != NO_ERROR) {
         ALOGE("native_window_set_usage failed: %s (%d)", strerror(-err), -err);
@@ -126,7 +127,7 @@
         return err;
     }
 
-    ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage %#x",
+    ALOGD("set up nativeWindow %p for %dx%d, color %#x, rotation %d, usage 0x%" PRIx64,
             nativeWindow, width, height, format, rotation, finalUsage);
     return NO_ERROR;
 }
@@ -334,7 +335,7 @@
 }
 
 status_t surfaceConnectWithListener(
-        const sp<Surface> &surface, sp<IProducerListener> listener, const char *reason) {
+        const sp<Surface> &surface, sp<SurfaceListener> listener, const char *reason) {
     ALOGD("connecting to surface %p, reason %s", surface.get(), reason);
 
     status_t err = surface->connect(NATIVE_WINDOW_API_MEDIA, listener);
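
A note for context on the SurfaceUtils hunk above: the combined gralloc usage is now kept in 64 bits, and the casts to uint32_t stop a usage value with the sign bit set from being sign-extended into the upper half when the int operands are widened. A standalone illustration (not AOSP code):

// Shows why ORing two ints straight into a 64-bit value is wrong when a
// usage flag occupies the sign bit.
#include <cstdint>
#include <cstdio>

int main() {
    const int producerUsage = static_cast<int>(0x80000000u);  // flag in the sign bit
    const int consumerUsage = 0x00000100;

    uint64_t sloppy  = producerUsage | consumerUsage;                    // sign-extended
    uint64_t careful = (uint32_t)producerUsage | (uint32_t)consumerUsage;

    std::printf("sloppy  = 0x%llx\n", (unsigned long long)sloppy);   // 0xffffffff80000100
    std::printf("careful = 0x%llx\n", (unsigned long long)careful);  // 0x80000100
    return 0;
}
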
diff --git a/media/libstagefright/TEST_MAPPING b/media/libstagefright/TEST_MAPPING
index b7efbce..354fab0 100644
--- a/media/libstagefright/TEST_MAPPING
+++ b/media/libstagefright/TEST_MAPPING
@@ -85,13 +85,37 @@
     // writerTest fails about 5 out of 66
     // { "name": "writerTest" },
     {
-       "name": "BatteryChecker_test"
+        "name": "BatteryChecker_test"
     },
     {
         "name": "ExtractorFactoryTest"
     },
     {
         "name": "HEVCUtilsUnitTest"
+    },
+    {
+      "name": "MctsMediaDecoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        }
+      ]
+    },
+    {
+      "name": "MctsMediaEncoderTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        }
+      ]
+    },
+    {
+      "name": "MctsMediaCodecTestCases",
+      "options": [
+        {
+          "include-annotation": "android.platform.test.annotations.Presubmit"
+        }
+      ]
     }
   ]
 }
diff --git a/media/libstagefright/VideoRenderQualityTracker.cpp b/media/libstagefright/VideoRenderQualityTracker.cpp
index eb9ac0f..bf29b1d 100644
--- a/media/libstagefright/VideoRenderQualityTracker.cpp
+++ b/media/libstagefright/VideoRenderQualityTracker.cpp
@@ -302,13 +302,6 @@
         mRenderDurationMs += (actualRenderTimeUs - mLastRenderTimeUs) / 1000;
     }
 
-    // Now that a frame has been rendered, the previously skipped frames can be processed as skipped
-    // frames since the app is not skipping them to terminate playback.
-    for (int64_t contentTimeUs : mPendingSkippedFrameContentTimeUsList) {
-        processMetricsForSkippedFrame(contentTimeUs);
-    }
-    mPendingSkippedFrameContentTimeUsList = {};
-
     // We can render a pending queued frame if it's the last frame of the video, so release it
     // immediately.
     if (contentTimeUs == mTunnelFrameQueuedContentTimeUs && mTunnelFrameQueuedContentTimeUs != -1) {
@@ -332,9 +325,25 @@
                   (long long) contentTimeUs, (long long) nextExpectedFrame.contentTimeUs);
             break;
         }
+        // Process all skipped frames before the dropped frame.
+        while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+            if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+                break;
+            }
+            processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+            mPendingSkippedFrameContentTimeUsList.pop_front();
+        }
         processMetricsForDroppedFrame(nextExpectedFrame.contentTimeUs,
                                       nextExpectedFrame.desiredRenderTimeUs);
     }
+    // Process all skipped frames before the rendered frame.
+    while (!mPendingSkippedFrameContentTimeUsList.empty()) {
+        if (mPendingSkippedFrameContentTimeUsList.front() >= nextExpectedFrame.contentTimeUs) {
+            break;
+        }
+        processMetricsForSkippedFrame(mPendingSkippedFrameContentTimeUsList.front());
+        mPendingSkippedFrameContentTimeUsList.pop_front();
+    }
     processMetricsForRenderedFrame(nextExpectedFrame.contentTimeUs,
                                    nextExpectedFrame.desiredRenderTimeUs, actualRenderTimeUs,
                                    freezeEventOut, judderEventOut);
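
A note for context on the VideoRenderQualityTracker hunks above: pending skipped-frame timestamps are no longer flushed wholesale after a render; they are drained only up to the frame currently being accounted for, so skips are attributed to the right point in the stream. The helper below sketches that ordering with hypothetical names (drainSkippedBefore, processSkipped):

// Standalone sketch: process every pending skipped frame that is older than
// the next expected frame, and leave the rest queued.
#include <cstdint>
#include <deque>
#include <iostream>

static void processSkipped(int64_t contentTimeUs) {
    std::cout << "skipped frame at " << contentTimeUs << "us\n";
}

static void drainSkippedBefore(std::deque<int64_t>& pendingSkippedUs,
                               int64_t nextExpectedContentTimeUs) {
    while (!pendingSkippedUs.empty() &&
           pendingSkippedUs.front() < nextExpectedContentTimeUs) {
        processSkipped(pendingSkippedUs.front());
        pendingSkippedUs.pop_front();
    }
}

int main() {
    std::deque<int64_t> pendingSkippedUs = {1000, 2000, 5000};
    drainSkippedBefore(pendingSkippedUs, 3000);  // handles 1000 and 2000 only
    std::cout << pendingSkippedUs.size() << " skip(s) still pending\n";  // prints 1
}
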
diff --git a/media/libstagefright/colorconversion/Android.bp b/media/libstagefright/colorconversion/Android.bp
index 7ff9b10..4072bf9 100644
--- a/media/libstagefright/colorconversion/Android.bp
+++ b/media/libstagefright/colorconversion/Android.bp
@@ -36,7 +36,7 @@
         "media_plugin_headers",
     ],
 
-    static_libs: ["libyuv_static"],
+    static_libs: ["libyuv"],
 
     cflags: ["-Werror"],
 
diff --git a/media/libstagefright/colorconversion/fuzzer/Android.bp b/media/libstagefright/colorconversion/fuzzer/Android.bp
index 237e715..50a2477 100644
--- a/media/libstagefright/colorconversion/fuzzer/Android.bp
+++ b/media/libstagefright/colorconversion/fuzzer/Android.bp
@@ -27,7 +27,7 @@
 cc_defaults {
     name: "libcolorconversion_fuzzer_defaults",
     static_libs: [
-        "libyuv_static",
+        "libyuv",
         "libstagefright_color_conversion",
         "libstagefright",
         "liblog",
diff --git a/media/libstagefright/data/media_codecs_sw.xml b/media/libstagefright/data/media_codecs_sw.xml
index bfa361c..c18ab94 100644
--- a/media/libstagefright/data/media_codecs_sw.xml
+++ b/media/libstagefright/data/media_codecs_sw.xml
@@ -139,6 +139,7 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.hevc.decoder" type="video/hevc" variant="slow-cpu,!slow-cpu">
@@ -160,6 +161,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8" variant="slow-cpu,!slow-cpu">
@@ -178,6 +180,7 @@
                 <Limit name="bitrate" range="1-40000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
         <MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9" variant="slow-cpu,!slow-cpu">
@@ -197,9 +200,30 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Attribute name="software-codec" />
         </MediaCodec>
-        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
+        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu">
+            <Limit name="alignment" value="1x1" />
+            <Limit name="block-size" value="16x16" />
+            <Variant name="!slow-cpu">
+                <Limit name="size" min="2x2" max="2048x2048" />
+                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
+                <Limit name="blocks-per-second" range="1-245760" />
+                <Limit name="bitrate" range="1-40000000" />
+            </Variant>
+            <Variant name="slow-cpu">
+                <Limit name="size" min="2x2" max="1280x1280" />
+                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
+                <Limit name="blocks-per-second" range="1-108000" />
+                <Limit name="bitrate" range="1-5000000" />
+            </Variant>
+            <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
+            <Feature name="low-latency" />
+            <Attribute name="software-codec" />
+        </MediaCodec>
+        <MediaCodec name="c2.android.av1.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
             <!-- TODO: implement a mechanism to prevent AV1 Decoder usage on pre-U devices -->
             <Limit name="alignment" value="1x1" />
             <Limit name="block-size" value="16x16" />
@@ -216,25 +240,7 @@
                 <Limit name="bitrate" range="1-5000000" />
             </Variant>
             <Feature name="adaptive-playback" />
-            <Feature name="low-latency" />
-            <Attribute name="software-codec" />
-        </MediaCodec>
-        <MediaCodec name="c2.android.av1-dav1d.decoder" type="video/av01" variant="slow-cpu,!slow-cpu" rank="1024">
-            <Limit name="alignment" value="1x1" />
-            <Limit name="block-size" value="16x16" />
-            <Variant name="!slow-cpu">
-                <Limit name="size" min="2x2" max="2048x2048" />
-                <Limit name="block-count" range="1-8192" /> <!-- max 2048x1024 -->
-                <Limit name="blocks-per-second" range="1-245760" />
-                <Limit name="bitrate" range="1-40000000" />
-            </Variant>
-            <Variant name="slow-cpu">
-                <Limit name="size" min="2x2" max="1280x1280" />
-                <Limit name="block-count" range="1-3600" /> <!-- max 1280x720 -->
-                <Limit name="blocks-per-second" range="1-108000" />
-                <Limit name="bitrate" range="1-5000000" />
-            </Variant>
-            <Feature name="adaptive-playback" />
+            <Feature name="dynamic-color-aspects" />
             <Feature name="low-latency" />
             <Attribute name="software-codec" />
         </MediaCodec>
@@ -326,7 +332,7 @@
                 <!-- profiles and levels:  ProfileBaseline : Level3 -->
                 <Limit name="block-count" range="1-1620" />
                 <Limit name="blocks-per-second" range="1-40500" />
-                <Limit name="bitrate" range="1-2000000" />
+                <Limit name="bitrate" range="1-10000000" />
             </Variant>
             <Feature name="intra-refresh" />
             <!-- Video Quality control -->
diff --git a/media/libstagefright/include/ACodecBufferChannel.h b/media/libstagefright/include/ACodecBufferChannel.h
index 946d533..46a5183 100644
--- a/media/libstagefright/include/ACodecBufferChannel.h
+++ b/media/libstagefright/include/ACodecBufferChannel.h
@@ -155,8 +155,8 @@
     // obtained. Inside BufferInfo, mBufferId and mSharedEncryptedBuffer are
     // immutable objects. We write internal states of mClient/CodecBuffer when
     // the caller has given up the reference, so that access is also safe.
-    std::shared_ptr<const std::vector<const BufferInfo>> mInputBuffers;
-    std::shared_ptr<const std::vector<const BufferInfo>> mOutputBuffers;
+    std::shared_ptr<const std::vector<BufferInfo>> mInputBuffers;
+    std::shared_ptr<const std::vector<BufferInfo>> mOutputBuffers;
 
     sp<MemoryDealer> makeMemoryDealer(size_t heapSize);
 
diff --git a/media/libstagefright/include/FrameDecoder.h b/media/libstagefright/include/FrameDecoder.h
index e417324..94c201f 100644
--- a/media/libstagefright/include/FrameDecoder.h
+++ b/media/libstagefright/include/FrameDecoder.h
@@ -18,12 +18,15 @@
 #define FRAME_DECODER_H_
 
 #include <memory>
+#include <mutex>
+#include <queue>
 #include <vector>
 
-#include <media/stagefright/foundation/AString.h>
-#include <media/stagefright/foundation/ABase.h>
-#include <media/stagefright/MediaSource.h>
 #include <media/openmax/OMX_Video.h>
+#include <media/stagefright/MediaSource.h>
+#include <media/stagefright/foundation/ABase.h>
+#include <media/stagefright/foundation/AHandler.h>
+#include <media/stagefright/foundation/AString.h>
 #include <ui/GraphicTypes.h>
 
 namespace android {
@@ -34,11 +37,23 @@
 class MediaCodecBuffer;
 class Surface;
 class VideoFrame;
+struct AsyncCodecHandler;
 
 struct FrameRect {
     int32_t left, top, right, bottom;
 };
 
+struct InputBufferIndexQueue {
+public:
+    void enqueue(int32_t index);
+    bool dequeue(int32_t* index, int32_t timeOutUs);
+
+private:
+    std::queue<int32_t> mQueue;
+    std::mutex mMutex;
+    std::condition_variable mCondition;
+};
+
 struct FrameDecoder : public RefBase {
     FrameDecoder(
             const AString &componentName,
@@ -53,7 +68,19 @@
             const sp<MetaData> &trackMeta, int colorFormat,
             bool thumbnail = false, uint32_t bitDepth = 0);
 
+    status_t handleInputBufferAsync(int32_t index);
+    status_t handleOutputBufferAsync(int32_t index, int64_t timeUs);
+    status_t handleOutputFormatChangeAsync(sp<AMessage> format);
+
+    enum {
+        kWhatCallbackNotify,
+    };
+
 protected:
+    AString mComponentName;
+    sp<AMessage> mOutputFormat;
+    bool mUseBlockModel;
+
     virtual ~FrameDecoder();
 
     virtual sp<AMessage> onGetFormatAndSeekOptions(
@@ -64,14 +91,12 @@
 
     virtual status_t onExtractRect(FrameRect *rect) = 0;
 
-    virtual status_t onInputReceived(
-            const sp<MediaCodecBuffer> &codecBuffer,
-            MetaDataBase &sampleMeta,
-            bool firstSample,
-            uint32_t *flags) = 0;
+    virtual status_t onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+                                     bool firstSample, uint32_t* flags) = 0;
 
     virtual status_t onOutputReceived(
-            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            uint8_t* data,
+            sp<ABuffer> imgObj,
             const sp<AMessage> &outputFormat,
             int64_t timeUs,
             bool *done) = 0;
@@ -83,7 +108,6 @@
     void setFrame(const sp<IMemory> &frameMem) { mFrameMemory = frameMem; }
 
 private:
-    AString mComponentName;
     sp<MetaData> mTrackMeta;
     sp<IMediaSource> mSource;
     OMX_COLOR_FORMATTYPE mDstFormat;
@@ -92,17 +116,32 @@
     sp<IMemory> mFrameMemory;
     MediaSource::ReadOptions mReadOptions;
     sp<MediaCodec> mDecoder;
-    sp<AMessage> mOutputFormat;
+    sp<AsyncCodecHandler> mHandler;
+    sp<ALooper> mAsyncLooper;
     bool mHaveMoreInputs;
     bool mFirstSample;
+    bool mHandleOutputBufferAsyncDone;
     sp<Surface> mSurface;
+    std::mutex mMutex;
+    std::condition_variable mOutputFramePending;
+    InputBufferIndexQueue mInputBufferIndexQueue;
 
     status_t extractInternal();
+    status_t extractInternalUsingBlockModel();
 
     DISALLOW_EVIL_CONSTRUCTORS(FrameDecoder);
 };
 struct FrameCaptureLayer;
 
+struct AsyncCodecHandler : public AHandler {
+public:
+    explicit AsyncCodecHandler(const wp<FrameDecoder>& frameDecoder);
+    virtual void onMessageReceived(const sp<AMessage>& msg);
+
+private:
+    wp<FrameDecoder> mFrameDecoder;
+};
+
 struct VideoFrameDecoder : public FrameDecoder {
     VideoFrameDecoder(
             const AString &componentName,
@@ -121,14 +160,12 @@
         return (rect == NULL) ? OK : ERROR_UNSUPPORTED;
     }
 
-    virtual status_t onInputReceived(
-            const sp<MediaCodecBuffer> &codecBuffer,
-            MetaDataBase &sampleMeta,
-            bool firstSample,
-            uint32_t *flags) override;
+    virtual status_t onInputReceived(uint8_t* data, size_t size, MetaDataBase& sampleMeta,
+                                     bool firstSample, uint32_t* flags) override;
 
     virtual status_t onOutputReceived(
-            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            uint8_t* data,
+            sp<ABuffer> imgObj,
             const sp<AMessage> &outputFormat,
             int64_t timeUs,
             bool *done) override;
@@ -162,14 +199,13 @@
 
     virtual status_t onExtractRect(FrameRect *rect) override;
 
-    virtual status_t onInputReceived(
-            const sp<MediaCodecBuffer> &codecBuffer __unused,
-            MetaDataBase &sampleMeta __unused,
-            bool firstSample __unused,
-            uint32_t *flags __unused) override { return OK; }
+    virtual status_t onInputReceived(uint8_t* __unused, size_t __unused,
+                                     MetaDataBase& sampleMeta __unused, bool firstSample __unused,
+                                     uint32_t* flags __unused) override { return OK; }
 
     virtual status_t onOutputReceived(
-            const sp<MediaCodecBuffer> &videoFrameBuffer,
+            uint8_t* data,
+            sp<ABuffer> imgObj,
             const sp<AMessage> &outputFormat,
             int64_t timeUs,
             bool *done) override;
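
A note for context on the FrameDecoder.h hunks above: InputBufferIndexQueue is only declared here. Given its enqueue/dequeue(index, timeOutUs) signatures and the mutex/condition members, one plausible shape is a timed blocking queue of codec input-buffer indices, so the async callback thread can hand indices to the thread feeding the decoder. The sketch below is illustrative, not the AOSP implementation.

// Possible shape of a timed blocking queue of input-buffer indices.
#include <chrono>
#include <condition_variable>
#include <cstdint>
#include <mutex>
#include <queue>

struct InputBufferIndexQueue {
    void enqueue(int32_t index) {
        std::lock_guard<std::mutex> lock(mMutex);
        mQueue.push(index);
        mCondition.notify_one();
    }

    // Returns false if no index became available within timeOutUs.
    bool dequeue(int32_t* index, int32_t timeOutUs) {
        std::unique_lock<std::mutex> lock(mMutex);
        if (!mCondition.wait_for(lock, std::chrono::microseconds(timeOutUs),
                                 [this] { return !mQueue.empty(); })) {
            return false;
        }
        *index = mQueue.front();
        mQueue.pop();
        return true;
    }

private:
    std::queue<int32_t> mQueue;
    std::mutex mMutex;
    std::condition_variable mCondition;
};
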
diff --git a/media/libstagefright/include/media/stagefright/CameraSource.h b/media/libstagefright/include/media/stagefright/CameraSource.h
index fcd17b9..f42e315 100644
--- a/media/libstagefright/include/media/stagefright/CameraSource.h
+++ b/media/libstagefright/include/media/stagefright/CameraSource.h
@@ -236,7 +236,7 @@
     // Start of members protected by mBatchLock
     std::deque<uint32_t> mInflightBatchSizes;
     std::vector<native_handle_t*> mInflightReturnedHandles;
-    std::vector<const sp<IMemory>> mInflightReturnedMemorys;
+    std::vector<sp<IMemory>> mInflightReturnedMemorys;
     // End of members protected by mBatchLock
 
     void releaseQueuedFrames();
diff --git a/media/libstagefright/include/media/stagefright/CodecBase.h b/media/libstagefright/include/media/stagefright/CodecBase.h
index bffb294..c6087b0 100644
--- a/media/libstagefright/include/media/stagefright/CodecBase.h
+++ b/media/libstagefright/include/media/stagefright/CodecBase.h
@@ -517,6 +517,15 @@
     };
 
     /**
+     * Notify that a buffer has been attached to the output surface.
+     *
+     * @param     generation    MediaCodec's surface specifier
+     */
+    virtual void onBufferAttachedToOutputSurface(uint32_t /*generation*/) {
+        // default: no-op
+    };
+
+    /**
      * Discard a buffer to the underlying CodecBase object.
      *
      * TODO: remove once this operation can be handled by just clearing the
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 054a4b8..ee75129 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -144,6 +144,7 @@
     std::mutex mFallocMutex;
     bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
     uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
+    Condition mFdCond;
 
     List<Track *> mTracks;
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 72785d5..b1cf665 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -562,6 +562,30 @@
     }
 }
 
+inline constexpr int32_t DTS_HDProfileHRA = 0x1;
+inline constexpr int32_t DTS_HDProfileLBR = 0x2;
+inline constexpr int32_t DTS_HDProfileMA = 0x4;
+
+inline static const char *asString_Dts_HDProfile(int32_t i, const char *def = "??") {
+    switch (i) {
+        case DTS_HDProfileHRA:  return "HRA";
+        case DTS_HDProfileLBR:  return "LBR";
+        case DTS_HDProfileMA:   return "MA";
+        default:                return def;
+    }
+}
+
+inline constexpr int32_t DTS_UHDProfileP1 = 0x1;
+inline constexpr int32_t DTS_UHDProfileP2 = 0x2;
+
+inline static const char *asString_Dts_UHDProfile(int32_t i, const char *def = "??") {
+    switch (i) {
+        case DTS_UHDProfileP1:  return "P1";
+        case DTS_UHDProfileP2:  return "P2";
+        default:                return def;
+    }
+}
+
 inline constexpr int32_t BITRATE_MODE_CBR = 2;
 inline constexpr int32_t BITRATE_MODE_CBR_FD = 3;
 inline constexpr int32_t BITRATE_MODE_CQ = 0;
@@ -729,8 +753,13 @@
 inline constexpr char MIMETYPE_AUDIO_FLAC[] = "audio/flac";
 inline constexpr char MIMETYPE_AUDIO_MSGSM[] = "audio/gsm";
 inline constexpr char MIMETYPE_AUDIO_AC3[] = "audio/ac3";
+inline constexpr char MIMETYPE_AUDIO_AC4[] = "audio/ac4";
 inline constexpr char MIMETYPE_AUDIO_EAC3[] = "audio/eac3";
+inline constexpr char MIMETYPE_AUDIO_EAC3_JOC[] = "audio/eac3-joc";
 inline constexpr char MIMETYPE_AUDIO_SCRAMBLED[] = "audio/scrambled";
+inline constexpr char MIMETYPE_AUDIO_DTS[] = "audio/vnd.dts";
+inline constexpr char MIMETYPE_AUDIO_DTS_HD[] = "audio/vnd.dts.hd";
+inline constexpr char MIMETYPE_AUDIO_DTS_UHD[] = "audio/vnd.dts.uhd";
 
 inline constexpr char MIMETYPE_IMAGE_ANDROID_HEIC[] = "image/vnd.android.heic";
 
@@ -882,7 +911,6 @@
 inline constexpr int32_t CRYPTO_MODE_AES_CBC     = 2;
 inline constexpr int32_t CRYPTO_MODE_AES_CTR     = 1;
 inline constexpr int32_t CRYPTO_MODE_UNENCRYPTED = 0;
-inline constexpr int32_t INFO_OUTPUT_BUFFERS_CHANGED = -3;
 inline constexpr int32_t INFO_OUTPUT_FORMAT_CHANGED  = -2;
 inline constexpr int32_t INFO_TRY_AGAIN_LATER        = -1;
 inline constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT               = 1;
diff --git a/media/libstagefright/include/media/stagefright/SurfaceUtils.h b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
index eccb413..882a5ab 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceUtils.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
@@ -27,7 +27,7 @@
 namespace android {
 
 struct HDRStaticInfo;
-class IProducerListener;
+class SurfaceListener;
 
 /**
  * Configures |nativeWindow| for given |width|x|height|, pixel |format|, |rotation| and |usage|.
@@ -45,7 +45,7 @@
 status_t nativeWindowConnect(ANativeWindow *surface, const char *reason);
 status_t nativeWindowDisconnect(ANativeWindow *surface, const char *reason);
 status_t surfaceConnectWithListener(const sp<Surface> &surface,
-        sp<IProducerListener> listener, const char *reason);
+        sp<SurfaceListener> listener, const char *reason);
 
 /**
  * Disable buffer dropping behavior of BufferQueue if target sdk of application
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 79ab009..630817c 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -20,9 +20,6 @@
 cc_library_shared {
     name: "libstagefright_omx",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
@@ -218,9 +215,6 @@
 cc_library_shared {
     name: "libstagefright_omx_utils",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
     srcs: ["OMXUtils.cpp"],
     export_include_dirs: [
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 4183023..4ab5d10 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -619,6 +619,13 @@
                 if (!isValidOMXParam(outParams)) {
                     return OMX_ErrorBadParameter;
                 }
+                if (offsetof(DescribeHDR10PlusInfoParams, nValue) + outParams->nParamSize >
+                    outParams->nSize) {
+                    ALOGE("b/329641908: too large param size; nParamSize=%u nSize=%u",
+                          outParams->nParamSize, outParams->nSize);
+                    android_errorWriteLog(0x534e4554, "329641908");
+                    return OMX_ErrorBadParameter;
+                }
 
                 outParams->nParamSizeUsed = info->size();
 
diff --git a/media/libstagefright/renderfright/Android.bp b/media/libstagefright/renderfright/Android.bp
index 22b13f6..bb850ca 100644
--- a/media/libstagefright/renderfright/Android.bp
+++ b/media/libstagefright/renderfright/Android.bp
@@ -84,9 +84,6 @@
     name: "librenderfright",
     defaults: ["librenderfright_defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     cflags: [
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.cpp b/media/libstagefright/renderfright/gl/ProgramCache.cpp
index 350f0b7..ad6dd03 100644
--- a/media/libstagefright/renderfright/gl/ProgramCache.cpp
+++ b/media/libstagefright/renderfright/gl/ProgramCache.cpp
@@ -683,7 +683,7 @@
             fs << "uniform mat4 inputTransformMatrix;";
             fs << R"__SHADER__(
                 highp vec3 InputTransform(const highp vec3 color) {
-                    return clamp(vec3(inputTransformMatrix * vec4(color, 1.0)), 0.0, 1.0);
+                    return vec3(inputTransformMatrix * vec4(color, 1.0));
                 }
             )__SHADER__";
         } else {
diff --git a/media/libstagefright/rtsp/fuzzer/Android.bp b/media/libstagefright/rtsp/fuzzer/Android.bp
index a2791ba..ff64af5 100644
--- a/media/libstagefright/rtsp/fuzzer/Android.bp
+++ b/media/libstagefright/rtsp/fuzzer/Android.bp
@@ -29,11 +29,19 @@
     header_libs: [
         "libstagefright_rtsp_headers",
     ],
-    fuzz_config:{
+    fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback@google.com",
         ],
         componentid: 155276,
+        hotlists: [
+            "4593311",
+        ],
+        description: "This fuzzer targets the APIs of libstagefright_rtsp",
+        vector: "local_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -44,7 +52,7 @@
     ],
     defaults: [
         "libstagefright_rtsp_fuzzer_defaults",
-    ]
+    ],
 }
 
 cc_fuzz {
@@ -55,7 +63,7 @@
     defaults: [
         "libstagefright_rtsp_fuzzer_defaults",
     ],
-    shared_libs:[
+    shared_libs: [
         "libandroid_net",
         "libbase",
         "libstagefright",
diff --git a/media/libstagefright/tests/fuzzers/Android.bp b/media/libstagefright/tests/fuzzers/Android.bp
index 2bcfd67..43542c5 100644
--- a/media/libstagefright/tests/fuzzers/Android.bp
+++ b/media/libstagefright/tests/fuzzers/Android.bp
@@ -32,6 +32,15 @@
         "liblog",
         "media_permission-aidl-cpp",
     ],
+    fuzz_config: {
+        componentid: 42195,
+        hotlists: ["4593311"],
+        description: "The fuzzer targets the APIs of libstagefright",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
+    },
 }
 
 cc_fuzz {
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
index 9f46a74..b29429a 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.cpp
@@ -21,105 +21,256 @@
 #include <media/stagefright/MPEG2TSWriter.h>
 #include <media/stagefright/MPEG4Writer.h>
 #include <media/stagefright/OggWriter.h>
-
-#include "MediaMimeTypes.h"
-
 #include <webm/WebmWriter.h>
 
 namespace android {
-std::string genMimeType(FuzzedDataProvider *dataProvider) {
-    uint8_t idx = dataProvider->ConsumeIntegralInRange<uint8_t>(0, kMimeTypes.size() - 1);
-    return std::string(kMimeTypes[idx]);
-}
 
-sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, std::string mimeType,
-                                      uint16_t maxDataAmount) {
-    uint32_t dataBlobSize = dataProvider->ConsumeIntegralInRange<uint16_t>(0, maxDataAmount);
-    std::vector<uint8_t> data = dataProvider->ConsumeBytes<uint8_t>(dataBlobSize);
-    // data:[<mediatype>][;base64],<data>
-    std::string uri("data:");
-    uri += mimeType;
-    // Currently libstagefright only accepts base64 uris
-    uri += ";base64,";
-    android::AString out;
-    android::encodeBase64(data.data(), data.size(), &out);
-    uri += out.c_str();
-
-    sp<DataSource> source =
-        DataSourceFactory::getInstance()->CreateFromURI(NULL /* httpService */, uri.c_str());
-
-    if (source == NULL) {
-        return NULL;
-    }
-
-    return MediaExtractorFactory::Create(source);
-}
-
-sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize) {
-    std::string mime = genMimeType(dataProvider);
-    sp<IMediaExtractor> extractor = genMediaExtractor(dataProvider, mime, maxMediaBlobSize);
-
-    if (extractor == NULL) {
-        return NULL;
-    }
-
-    for (size_t i = 0; i < extractor->countTracks(); ++i) {
-        sp<MetaData> meta = extractor->getTrackMetaData(i);
-
-        std::string trackMime = dataProvider->PickValueInArray(kTestedMimeTypes);
-        if (!strcasecmp(mime.c_str(), trackMime.c_str())) {
-            sp<IMediaSource> track = extractor->getTrack(i);
-            if (track == NULL) {
-                return NULL;
-            }
-            return new CallbackMediaSource(track);
-        }
-    }
-
-    return NULL;
-}
-
-sp<MediaWriter> createWriter(int fd, StandardWriters writerType, sp<MetaData> fileMeta) {
+sp<MediaWriter> createWriter(int fd, StandardWriters writerType, sp<MetaData> writerMeta,
+                             FuzzedDataProvider* fdp) {
     sp<MediaWriter> writer;
+
+    if (fdp->ConsumeBool()) {
+        writerMeta->setInt32(kKeyRealTimeRecording, fdp->ConsumeBool());
+    }
+
     switch (writerType) {
-        case OGG:
-            writer = new OggWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
-            break;
         case AAC:
-            writer = new AACWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADIF);
+            writer = sp<AACWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADIF);
+            }
             break;
         case AAC_ADTS:
-            writer = new AACWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADTS);
-            break;
-        case WEBM:
-            writer = new WebmWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_WEBM);
-            break;
-        case MPEG4:
-            writer = new MPEG4Writer(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG_4);
+            writer = sp<AACWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AAC_ADTS);
+            }
             break;
         case AMR_NB:
-            writer = new AMRWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_NB);
+            writer = sp<AMRWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_NB);
+            }
             break;
         case AMR_WB:
-            writer = new AMRWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_WB);
+            writer = sp<AMRWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_AMR_WB);
+            }
             break;
         case MPEG2TS:
-            writer = new MPEG2TSWriter(fd);
-            fileMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG2TS);
+            writer = sp<MPEG2TSWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG2TS);
+            }
             break;
-        default:
-            return nullptr;
+        case MPEG4:
+            writer = sp<MPEG4Writer>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_MPEG_4);
+            } else if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_HEIF);
+            } else if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_THREE_GPP);
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKey2ByteNalLength, fdp->ConsumeBool());
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyTimeScale,
+                                     fdp->ConsumeIntegralInRange<int32_t>(600, 96000));
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKey4BitTrackIds, fdp->ConsumeBool());
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt64(kKeyTrackTimeStatus, fdp->ConsumeIntegral<int64_t>());
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyRotation, fdp->ConsumeIntegralInRange<uint8_t>(0, 3) * 90);
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt64(kKeyTime, fdp->ConsumeIntegral<int64_t>());
+            }
+            break;
+        case OGG:
+            writer = sp<OggWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_OGG);
+            }
+            break;
+        case WEBM:
+            writer = sp<WebmWriter>::make(fd);
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyFileType, output_format::OUTPUT_FORMAT_WEBM);
+            }
+
+            if (fdp->ConsumeBool()) {
+                writerMeta->setInt32(kKeyTimeScale,
+                                     fdp->ConsumeIntegralInRange<int32_t>(600, 96000));
+            }
+            break;
     }
-    if (writer != nullptr) {
-        fileMeta->setInt32(kKeyRealTimeRecording, false);
-    }
+
     return writer;
 }
+
+sp<FuzzSource> createSource(StandardWriters writerType, FuzzedDataProvider* fdp) {
+    sp<MetaData> meta = sp<MetaData>::make();
+
+    switch (writerType) {
+        case AAC:
+        case AAC_ADTS:
+            meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+            meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegralInRange<uint8_t>(1, 7));
+            meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyAACProfile, fdp->ConsumeIntegral<int32_t>());
+            }
+            break;
+        case AMR_NB:
+            meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_NB);
+            meta->setInt32(kKeyChannelCount, 1);
+            meta->setInt32(kKeySampleRate, 8000);
+            break;
+        case AMR_WB:
+            meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_WB);
+            meta->setInt32(kKeyChannelCount, 1);
+            meta->setInt32(kKeySampleRate, 16000);
+            break;
+        case MPEG2TS:
+            if (fdp->ConsumeBool()) {
+                meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
+                meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+                meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+            } else {
+                meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
+                // The +1s ensure a minimum height and width of 1.
+                meta->setInt32(kKeyWidth, fdp->ConsumeIntegral<uint16_t>() + 1);
+                meta->setInt32(kKeyHeight, fdp->ConsumeIntegral<uint16_t>() + 1);
+            }
+            break;
+        case MPEG4: {
+            auto mime = fdp->PickValueInArray<std::string>(kMpeg4MimeTypes);
+            meta->setCString(kKeyMIMEType, mime.c_str());
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyBackgroundMode, fdp->ConsumeBool());
+            }
+
+            if (!strncasecmp(mime.c_str(), "audio/", 6)) {
+                meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+                meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+
+            } else {
+                // The +1s ensure a minimum height and width of 1.
+                meta->setInt32(kKeyWidth, fdp->ConsumeIntegral<uint16_t>() + 1);
+                meta->setInt32(kKeyHeight, fdp->ConsumeIntegral<uint16_t>() + 1);
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyDisplayWidth, fdp->ConsumeIntegral<uint16_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyDisplayHeight, fdp->ConsumeIntegral<uint16_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyTileWidth, fdp->ConsumeIntegral<uint16_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyTileHeight, fdp->ConsumeIntegral<uint16_t>());
+                }
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyGridRows, fdp->ConsumeIntegral<uint8_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyGridCols, fdp->ConsumeIntegral<uint8_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyTemporalLayerCount, fdp->ConsumeIntegral<int32_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeySARWidth, fdp->ConsumeIntegral<uint16_t>());
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeySARHeight, fdp->ConsumeIntegral<uint16_t>());
+                }
+            }
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyBitRate, fdp->ConsumeIntegral<int32_t>());
+            }
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyMaxBitRate, fdp->ConsumeIntegral<int32_t>());
+            }
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyTrackIsDefault, fdp->ConsumeBool());
+            }
+            break;
+        }
+        case OGG:
+            meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_OPUS);
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+            }
+
+            if (fdp->ConsumeBool()) {
+                meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+            }
+            break;
+        case WEBM:
+            if (fdp->ConsumeBool()) {
+                if (fdp->ConsumeBool()) {
+                    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VP8);
+                } else {
+                    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_VP9);
+                }
+
+                if (fdp->ConsumeBool()) {
+                    // The +1s ensure a minimum height and width of 1.
+                    meta->setInt32(kKeyWidth, fdp->ConsumeIntegral<uint16_t>() + 1);
+                    meta->setInt32(kKeyHeight, fdp->ConsumeIntegral<uint16_t>() + 1);
+                }
+            } else {
+                if (fdp->ConsumeBool()) {
+                    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_VORBIS);
+                } else {
+                    meta->setCString(kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_OPUS);
+                }
+
+                if (fdp->ConsumeBool()) {
+                    meta->setInt32(kKeyChannelCount, fdp->ConsumeIntegral<int32_t>());
+                }
+                meta->setInt32(kKeySampleRate, fdp->PickValueInArray<uint32_t>(kSampleRateTable));
+            }
+
+            break;
+    }
+
+    return sp<FuzzSource>::make(meta, fdp);
+}
 }  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
index 6856ac0..ad1218b 100644
--- a/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
+++ b/media/libstagefright/tests/fuzzers/FuzzerMediaUtility.h
@@ -15,20 +15,52 @@
  */
 
 #pragma once
-#include <datasource/DataSourceFactory.h>
+
 #include <fuzzer/FuzzedDataProvider.h>
-#include <android/IMediaExtractor.h>
-#include <media/IMediaHTTPService.h>
-#include <media/mediarecorder.h>
-#include <media/stagefright/CallbackMediaSource.h>
+
+#include <media/stagefright/MediaBuffer.h>
 #include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaExtractorFactory.h>
 #include <media/stagefright/MediaWriter.h>
-#include <media/stagefright/MetaData.h>
-#include <media/stagefright/foundation/base64.h>
-#include <utils/StrongPointer.h>
 
 namespace android {
+class FuzzSource : public MediaSource {
+  public:
+    FuzzSource(sp<MetaData> meta, FuzzedDataProvider* fdp) : mMetaData(meta), mFdp(fdp) {}
+
+    status_t start(MetaData*) { return OK; }
+
+    virtual status_t stop() { return OK; }
+
+    status_t read(MediaBufferBase** buffer, const ReadOptions*) {
+        // Ensure that mBuffer has at least two bytes to avoid a check failure
+        // in MPEG2TSWriter::SourceInfo::onMessageReceived().
+        if (mFdp->remaining_bytes() > 2) {
+            auto size = mFdp->ConsumeIntegralInRange<uint8_t>(2, INT8_MAX);
+            mBuffer = mFdp->ConsumeBytes<uint8_t>(size);
+            MediaBufferBase* mbb = new MediaBuffer(mBuffer.data(), mBuffer.size());
+
+            size_t length = mFdp->ConsumeIntegralInRange<size_t>(2, mbb->size());
+            size_t offset = mFdp->ConsumeIntegralInRange<size_t>(0, mbb->size() - length);
+            mbb->set_range(offset, length);
+
+            mbb->meta_data().setInt32(kKeyIsEndOfStream, mFdp->ConsumeBool());
+            mbb->meta_data().setInt64(kKeyTime, mFdp->ConsumeIntegral<uint32_t>() / 2);
+            *buffer = mbb;
+
+            return OK;
+        }
+
+        return ERROR_END_OF_STREAM;
+    }
+
+    sp<MetaData> getFormat() { return mMetaData; }
+
+  private:
+    sp<MetaData> mMetaData = nullptr;
+    FuzzedDataProvider* mFdp = nullptr;
+    std::vector<uint8_t> mBuffer;
+};
+
 enum StandardWriters {
     OGG,
     AAC,
@@ -42,54 +74,22 @@
     kMaxValue = MPEG2TS,
 };
 
-static std::string kTestedMimeTypes[] = {"audio/3gpp",
-                                         "audio/amr-wb",
-                                         "audio/vorbis",
-                                         "audio/opus",
-                                         "audio/mp4a-latm",
-                                         "audio/mpeg",
-                                         "audio/mpeg-L1",
-                                         "audio/mpeg-L2",
-                                         "audio/midi",
-                                         "audio/qcelp",
-                                         "audio/g711-alaw",
-                                         "audio/g711-mlaw",
-                                         "audio/flac",
-                                         "audio/aac-adts",
-                                         "audio/gsm",
-                                         "audio/ac3",
-                                         "audio/eac3",
-                                         "audio/eac3-joc",
-                                         "audio/ac4",
-                                         "audio/scrambled",
-                                         "audio/alac",
-                                         "audio/x-ms-wma",
-                                         "audio/x-adpcm-ms",
-                                         "audio/x-adpcm-dvi-ima",
-                                         "video/avc",
-                                         "video/hevc",
-                                         "video/mp4v-es",
-                                         "video/3gpp",
-                                         "video/x-vnd.on2.vp8",
-                                         "video/x-vnd.on2.vp9",
-                                         "video/av01",
-                                         "video/mpeg2",
-                                         "video/dolby-vision",
-                                         "video/scrambled",
-                                         "video/divx",
-                                         "video/divx3",
-                                         "video/xvid",
-                                         "video/x-motion-jpeg",
-                                         "text/3gpp-tt",
-                                         "application/x-subrip",
-                                         "text/vtt",
-                                         "text/cea-608",
-                                         "text/cea-708",
-                                         "application/x-id3v4"};
+static const uint32_t kSampleRateTable[] = {
+        8000, 11025, 12000, 16000, 22050, 24000, 32000, 44100, 48000, 64000, 88200, 96000,
+};
+static const std::string kMpeg4MimeTypes[] = {
+        MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC, MEDIA_MIMETYPE_IMAGE_AVIF,
 
-std::string genMimeType(FuzzedDataProvider *dataProvider);
-sp<IMediaExtractor> genMediaExtractor(FuzzedDataProvider *dataProvider, uint16_t dataAmount);
-sp<MediaSource> genMediaSource(FuzzedDataProvider *dataProvider, uint16_t maxMediaBlobSize);
+        MEDIA_MIMETYPE_VIDEO_AV1,          MEDIA_MIMETYPE_VIDEO_AVC,
+        MEDIA_MIMETYPE_VIDEO_HEVC,         MEDIA_MIMETYPE_VIDEO_MPEG4,
+        MEDIA_MIMETYPE_VIDEO_H263,         MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
 
-sp<MediaWriter> createWriter(int32_t fd, StandardWriters writerType, sp<MetaData> fileMeta);
+        MEDIA_MIMETYPE_AUDIO_AMR_NB,       MEDIA_MIMETYPE_AUDIO_AMR_WB,
+        MEDIA_MIMETYPE_AUDIO_AAC,
+};
+
+sp<MediaWriter> createWriter(int32_t fd, StandardWriters writerType, sp<MetaData> writerMeta,
+                             FuzzedDataProvider* fdp);
+
+sp<FuzzSource> createSource(StandardWriters writerType, FuzzedDataProvider* fdp);
 }  // namespace android
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
index 70d73c8..5ac2a54 100644
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.cpp
@@ -13,94 +13,221 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
 
-#include <MediaMuxerFuzzer.h>
-#include <cutils/ashmem.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/stagefright/MediaMuxer.h>
 #include <media/stagefright/foundation/ABuffer.h>
 #include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaDefs.h>
 
 namespace android {
+const uint8_t kMinSize = 0;
+const uint8_t kMinTrackCount = 0;
 
-// Can't seem to get setBuffer or setString working. It always segfaults on a
-// null pointer read or memleaks. So that functionality is missing.
-void createMessage(AMessage *msg, FuzzedDataProvider *fdp) {
-  size_t count = fdp->ConsumeIntegralInRange<size_t>(0, 32);
-  while (fdp->remaining_bytes() > 0 && count > 0) {
-    uint8_t function_id =
-        fdp->ConsumeIntegralInRange<uint8_t>(0, amessage_setvals.size() - 1);
-    amessage_setvals[function_id](msg, fdp);
-    count--;
-  }
+enum kBufferFlags { BUFFER_FLAG_SYNCFRAME = 1, BUFFER_FLAG_CODECCONFIG = 2, BUFFER_FLAG_EOS = 4 };
+
+constexpr char kMuxerFile[] = "MediaMuxer";
+
+const std::string kAudioMimeTypes[] = {
+        MEDIA_MIMETYPE_AUDIO_AMR_NB,
+        MEDIA_MIMETYPE_AUDIO_AMR_WB,
+        MEDIA_MIMETYPE_AUDIO_MPEG,
+        MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
+        MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
+        MEDIA_MIMETYPE_AUDIO_MIDI,
+        MEDIA_MIMETYPE_AUDIO_AAC,
+        MEDIA_MIMETYPE_AUDIO_QCELP,
+        MEDIA_MIMETYPE_AUDIO_VORBIS,
+        MEDIA_MIMETYPE_AUDIO_OPUS,
+        MEDIA_MIMETYPE_AUDIO_G711_ALAW,
+        MEDIA_MIMETYPE_AUDIO_G711_MLAW,
+        MEDIA_MIMETYPE_AUDIO_RAW,
+        MEDIA_MIMETYPE_AUDIO_FLAC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS,
+        MEDIA_MIMETYPE_AUDIO_MSGSM,
+        MEDIA_MIMETYPE_AUDIO_AC3,
+        MEDIA_MIMETYPE_AUDIO_EAC3,
+        MEDIA_MIMETYPE_AUDIO_EAC3_JOC,
+        MEDIA_MIMETYPE_AUDIO_AC4,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_MHA1,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_MHM1,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L3,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_BL_L4,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L3,
+        MEDIA_MIMETYPE_AUDIO_MPEGH_LC_L4,
+        MEDIA_MIMETYPE_AUDIO_SCRAMBLED,
+        MEDIA_MIMETYPE_AUDIO_ALAC,
+        MEDIA_MIMETYPE_AUDIO_WMA,
+        MEDIA_MIMETYPE_AUDIO_MS_ADPCM,
+        MEDIA_MIMETYPE_AUDIO_DVI_IMA_ADPCM,
+        MEDIA_MIMETYPE_AUDIO_DTS,
+        MEDIA_MIMETYPE_AUDIO_DTS_HD,
+        MEDIA_MIMETYPE_AUDIO_DTS_HD_MA,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD_P1,
+        MEDIA_MIMETYPE_AUDIO_DTS_UHD_P2,
+        MEDIA_MIMETYPE_AUDIO_EVRC,
+        MEDIA_MIMETYPE_AUDIO_EVRCB,
+        MEDIA_MIMETYPE_AUDIO_EVRCWB,
+        MEDIA_MIMETYPE_AUDIO_EVRCNW,
+        MEDIA_MIMETYPE_AUDIO_AMR_WB_PLUS,
+        MEDIA_MIMETYPE_AUDIO_APTX,
+        MEDIA_MIMETYPE_AUDIO_DRA,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_1_0,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_0,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_MAT_2_1,
+        MEDIA_MIMETYPE_AUDIO_DOLBY_TRUEHD,
+        MEDIA_MIMETYPE_AUDIO_AAC_MP4,
+        MEDIA_MIMETYPE_AUDIO_AAC_MAIN,
+        MEDIA_MIMETYPE_AUDIO_AAC_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_SSR,
+        MEDIA_MIMETYPE_AUDIO_AAC_LTP,
+        MEDIA_MIMETYPE_AUDIO_AAC_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_SCALABLE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ERLC,
+        MEDIA_MIMETYPE_AUDIO_AAC_LD,
+        MEDIA_MIMETYPE_AUDIO_AAC_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_AAC_ELD,
+        MEDIA_MIMETYPE_AUDIO_AAC_XHE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADIF,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_MAIN,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SSR,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LTP,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_SCALABLE,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ERLC,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_LD,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_ELD,
+        MEDIA_MIMETYPE_AUDIO_AAC_ADTS_XHE,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_LC,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V1,
+        MEDIA_MIMETYPE_AUDIO_AAC_LATM_HE_V2,
+        MEDIA_MIMETYPE_AUDIO_IEC61937,
+        MEDIA_MIMETYPE_AUDIO_IEC60958,
+};
+
+const std::string kVideoMimeTypes[] = {
+        MEDIA_MIMETYPE_VIDEO_VP8,       MEDIA_MIMETYPE_VIDEO_VP9,
+        MEDIA_MIMETYPE_VIDEO_AV1,       MEDIA_MIMETYPE_VIDEO_AVC,
+        MEDIA_MIMETYPE_VIDEO_HEVC,      MEDIA_MIMETYPE_VIDEO_MPEG4,
+        MEDIA_MIMETYPE_VIDEO_H263,      MEDIA_MIMETYPE_VIDEO_MPEG2,
+        MEDIA_MIMETYPE_VIDEO_RAW,       MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
+        MEDIA_MIMETYPE_VIDEO_SCRAMBLED, MEDIA_MIMETYPE_VIDEO_DIVX,
+        MEDIA_MIMETYPE_VIDEO_DIVX3,     MEDIA_MIMETYPE_VIDEO_XVID,
+        MEDIA_MIMETYPE_VIDEO_MJPEG,
+};
+
+void getSampleAudioFormat(FuzzedDataProvider& fdp, AMessage* format) {
+    std::string mimeType = fdp.PickValueInArray(kAudioMimeTypes);
+    format->setString("mime", mimeType.c_str(), mimeType.length());
+    format->setInt32("sample-rate", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("channel-count", fdp.ConsumeIntegral<int32_t>());
+}
+
+void getSampleVideoFormat(FuzzedDataProvider& fdp, AMessage* format) {
+    std::string mimeType = fdp.PickValueInArray(kVideoMimeTypes);
+    format->setString("mime", mimeType.c_str(), mimeType.length());
+    format->setInt32("height", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("width", fdp.ConsumeIntegral<int32_t>());
+    format->setInt32("time-lapse-fps", fdp.ConsumeIntegral<int32_t>());
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-  FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+    FuzzedDataProvider fdp(data, size);
 
-  size_t data_size = fdp.ConsumeIntegralInRange<size_t>(0, size);
-  int fd = ashmem_create_region("mediamuxer_fuzz_region", data_size);
-  if (fd < 0)
+    // memfd_create() creates an anonymous file and returns a file
+    // descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+    // operations on this file.
+    int32_t fd = memfd_create(kMuxerFile, MFD_ALLOW_SEALING);
+    if (fd == -1) {
+        ALOGE("memfd_create failed: %s", strerror(errno));
+        return 0;
+    }
+
+    auto outputFormat = (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(
+            MediaMuxer::OutputFormat::OUTPUT_FORMAT_MPEG_4,
+            MediaMuxer::OutputFormat::OUTPUT_FORMAT_LIST_END);
+
+    sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, outputFormat);
+    if (mMuxer == nullptr) {
+        close(fd);
+        return 0;
+    }
+
+    // Consume at most 80% of the data for the buffers passed to writeSampleData().
+    // This ensures the input is not completely exhausted, leaving the remaining 20%
+    // for fuzzing the other APIs.
+    const size_t kMaxSize = (size * 80) / 100;
+    while (fdp.remaining_bytes()) {
+        auto invokeMediaMuxerAPI = fdp.PickValueInArray<const std::function<void()>>({
+                [&]() {
+                    // Using 'return' here due to a timeout bug present in OGGWriter.cpp
+                    // (b/310316183).
+                    if (outputFormat == MediaMuxer::OutputFormat::OUTPUT_FORMAT_OGG) {
+                        return;
+                    }
+
+                    sp<AMessage> format = sp<AMessage>::make();
+                    fdp.ConsumeBool() ? getSampleAudioFormat(fdp, format.get())
+                                      : getSampleVideoFormat(fdp, format.get());
+
+                    mMuxer->addTrack(fdp.ConsumeBool() ? format : nullptr);
+                },
+                [&]() {
+                    mMuxer->setLocation(fdp.ConsumeIntegral<int32_t>() /* latitude */,
+                                        fdp.ConsumeIntegral<int32_t>() /* longitude */);
+                },
+                [&]() { mMuxer->setOrientationHint(fdp.ConsumeIntegral<int32_t>() /* degrees */); },
+                [&]() { mMuxer->start(); },
+                [&]() {
+                    std::vector<uint8_t> sample = fdp.ConsumeBytes<uint8_t>(
+                            fdp.ConsumeIntegralInRange<size_t>(kMinSize, kMaxSize));
+                    sp<ABuffer> buffer = sp<ABuffer>::make(sample.data(), sample.size());
+
+                    size_t offset = fdp.ConsumeIntegralInRange<size_t>(kMinSize, sample.size());
+                    size_t length =
+                            fdp.ConsumeIntegralInRange<size_t>(kMinSize, buffer->size() - offset);
+                    buffer->setRange(offset, length);
+
+                    sp<AMessage> meta = buffer->meta();
+                    meta->setInt64("sample-file-offset", fdp.ConsumeIntegral<int64_t>());
+                    meta->setInt64("last-sample-index-in-chunk", fdp.ConsumeIntegral<int64_t>());
+
+                    uint32_t flags = 0;
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_SYNCFRAME;
+                    }
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_CODECCONFIG;
+                    }
+                    if (fdp.ConsumeBool()) {
+                        flags |= kBufferFlags::BUFFER_FLAG_EOS;
+                    }
+
+                    size_t trackIndex = fdp.ConsumeBool()
+                                                ? fdp.ConsumeIntegralInRange<size_t>(
+                                                          kMinTrackCount, mMuxer->getTrackCount())
+                                                : fdp.ConsumeIntegral<size_t>();
+                    int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
+                    mMuxer->writeSampleData(fdp.ConsumeBool() ? buffer : nullptr, trackIndex,
+                                            timeUs, flags);
+                },
+                [&]() {
+                    mMuxer->getTrackFormat(
+                            fdp.ConsumeBool() ? fdp.ConsumeIntegralInRange<size_t>(
+                                                        kMinTrackCount, mMuxer->getTrackCount())
+                                              : fdp.ConsumeIntegral<size_t>() /* idx */);
+                },
+                [&]() { mMuxer->stop(); },
+        });
+
+        invokeMediaMuxerAPI();
+    }
+
+    close(fd);
     return 0;
-
-  uint8_t *sh_data = static_cast<uint8_t *>(
-      mmap(NULL, data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0));
-  if (sh_data == MAP_FAILED)
-    return 0;
-
-  MediaMuxer::OutputFormat format =
-      (MediaMuxer::OutputFormat)fdp.ConsumeIntegralInRange<int32_t>(0, 4);
-  sp<MediaMuxer> mMuxer = MediaMuxer::create(fd, format);
-  if (mMuxer == nullptr) {
-    return 0;
-  }
-
-  while (fdp.remaining_bytes() > 1) {
-    switch (fdp.ConsumeIntegralInRange<uint8_t>(0, 4)) {
-    case 0: {
-      // For some reason it only likes mp4s here...
-      if (format == 1 || format == 4)
-        break;
-
-      sp<AMessage> a_format(new AMessage);
-      createMessage(a_format.get(), &fdp);
-      mMuxer->addTrack(a_format);
-      break;
-    }
-    case 1: {
-      mMuxer->start();
-      break;
-    }
-    case 2: {
-      int degrees = fdp.ConsumeIntegral<int>();
-      mMuxer->setOrientationHint(degrees);
-      break;
-    }
-    case 3: {
-      int latitude = fdp.ConsumeIntegral<int>();
-      int longitude = fdp.ConsumeIntegral<int>();
-      mMuxer->setLocation(latitude, longitude);
-      break;
-    }
-    case 4: {
-      size_t buf_size = fdp.ConsumeIntegralInRange<size_t>(0, data_size);
-      sp<ABuffer> a_buffer(new ABuffer(buf_size));
-
-      size_t trackIndex = fdp.ConsumeIntegral<size_t>();
-      int64_t timeUs = fdp.ConsumeIntegral<int64_t>();
-      uint32_t flags = fdp.ConsumeIntegral<uint32_t>();
-      mMuxer->writeSampleData(a_buffer, trackIndex, timeUs, flags);
-    }
-    }
-  }
-
-  if (fdp.ConsumeBool())
-    mMuxer->stop();
-
-  munmap(sh_data, data_size);
-  close(fd);
-  return 0;
 }
 } // namespace android
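Note: the rewritten muxer fuzzer drives MediaMuxer through a table of lambdas chosen by FuzzedDataProvider; each iteration picks one API call and derives its arguments from the remaining input. A minimal standalone sketch of that dispatch idiom (illustration only; callApiOne/callApiTwo are placeholders and not part of this patch):

    #include <fuzzer/FuzzedDataProvider.h>
    #include <functional>

    // Placeholder targets for illustration; a real fuzzer would invoke the API
    // under test here with arguments derived from fdp.
    static void callApiOne(int) {}
    static void callApiTwo(bool) {}

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);
        while (fdp.remaining_bytes()) {
            // Pick one operation per iteration; each lambda consumes its own inputs.
            auto op = fdp.PickValueInArray<const std::function<void()>>({
                    [&]() { callApiOne(fdp.ConsumeIntegral<int>()); },
                    [&]() { callApiTwo(fdp.ConsumeBool()); },
            });
            op();
        }
        return 0;
    }

Each iteration consumes input, so the loop terminates once the provider is exhausted, mirroring the structure of the fuzzer above.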
diff --git a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h b/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
deleted file mode 100644
index 7d4421d..0000000
--- a/media/libstagefright/tests/fuzzers/MediaMuxerFuzzer.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright 2020 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
-
-#pragma once
-
-#include <fuzzer/FuzzedDataProvider.h>
-#include <media/stagefright/foundation/AMessage.h>
-
-namespace android {
-
-// Mappings vectors are the list of attributes that the MediaMuxer
-// class looks for in the message.
-static std::vector<const char *> floatMappings{
-    "capture-rate",
-    "time-lapse-fps",
-    "frame-rate",
-};
-
-static std::vector<const char *> int64Mappings{
-    "exif-offset",    "exif-size", "target-time",
-    "thumbnail-time", "timeUs",    "durationUs",
-};
-
-static std::vector<const char *> int32Mappings{"loop",
-                                               "time-scale",
-                                               "crypto-mode",
-                                               "crypto-default-iv-size",
-                                               "crypto-encrypted-byte-block",
-                                               "crypto-skip-byte-block",
-                                               "frame-count",
-                                               "max-bitrate",
-                                               "pcm-big-endian",
-                                               "temporal-layer-count",
-                                               "temporal-layer-id",
-                                               "thumbnail-width",
-                                               "thumbnail-height",
-                                               "track-id",
-                                               "valid-samples",
-                                               "color-format",
-                                               "ca-system-id",
-                                               "is-sync-frame",
-                                               "bitrate",
-                                               "max-bitrate",
-                                               "width",
-                                               "height",
-                                               "sar-width",
-                                               "sar-height",
-                                               "display-width",
-                                               "display-height",
-                                               "is-default",
-                                               "tile-width",
-                                               "tile-height",
-                                               "grid-rows",
-                                               "grid-cols",
-                                               "rotation-degrees",
-                                               "channel-count",
-                                               "sample-rate",
-                                               "bits-per-sample",
-                                               "channel-mask",
-                                               "encoder-delay",
-                                               "encoder-padding",
-                                               "is-adts",
-                                               "frame-rate",
-                                               "max-height",
-                                               "max-width",
-                                               "max-input-size",
-                                               "haptic-channel-count",
-                                               "pcm-encoding",
-                                               "aac-profile"};
-
-static const std::vector<std::function<void(AMessage *, FuzzedDataProvider *)>>
-    amessage_setvals = {
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setRect("crop", fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>(),
-                       fdp->ConsumeIntegral<int32_t>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setFloat(floatMappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, floatMappings.size() - 1)],
-                        fdp->ConsumeFloatingPoint<float>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setInt64(int64Mappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, int64Mappings.size() - 1)],
-                        fdp->ConsumeIntegral<int64_t>());
-        },
-        [](AMessage *msg, FuzzedDataProvider *fdp) -> void {
-          msg->setInt32(int32Mappings[fdp->ConsumeIntegralInRange<size_t>(
-                            0, int32Mappings.size() - 1)],
-                        fdp->ConsumeIntegral<int32_t>());
-        }};
-} // namespace android
diff --git a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
index 97d1160..cd0a866 100644
--- a/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
+++ b/media/libstagefright/tests/fuzzers/WriterFuzzer.cpp
@@ -13,216 +13,49 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-// Authors: corbin.souffrant@leviathansecurity.com
-//          dylan.katz@leviathansecurity.com
-
-#include <android-base/file.h>
-#include <android/content/AttributionSourceState.h>
-#include <ctype.h>
-#include <media/mediarecorder.h>
-#include <media/stagefright/MPEG4Writer.h>
-#include <media/stagefright/MediaDefs.h>
-#include <stdlib.h>
-#include <utils/StrongPointer.h>
-#include <utils/Vector.h>
-
-#include <functional>
-#include <string>
 
 #include "FuzzerMediaUtility.h"
-#include "fuzzer/FuzzedDataProvider.h"
-
-static constexpr uint16_t kMaxOperations = 5000;
-static constexpr uint8_t kMaxPackageNameLen = 50;
-// For other strings in mpeg we want a higher limit.
-static constexpr uint16_t kMaxMPEGStrLen = 1000;
-static constexpr uint16_t kMaxMediaBlobSize = 1000;
 
 namespace android {
 
-using android::content::AttributionSourceState;
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
 
-std::string getFourCC(FuzzedDataProvider *fdp) {
-    std::string fourCC = fdp->ConsumeRandomLengthString(4);
-    // Replace any existing nulls
-    for (size_t pos = 0; pos < fourCC.length(); pos++) {
-        if (fourCC.at(pos) == '\0') {
-            fourCC.replace(pos, 1, "a");
-        }
+    // memfd_create() creates an anonymous file and returns a file
+    // descriptor that refers to it. MFD_ALLOW_SEALING allows sealing
+    // operations on this file.
+    int32_t fd = memfd_create("WriterFuzzer", MFD_ALLOW_SEALING);
+    if (fd == -1) {
+        ALOGE("memfd_create() failed: %s", strerror(errno));
+        return 0;
     }
 
-    // If our string is too short, fill the remainder with "a"s.
-    while (fourCC.length() < 4) {
-        fourCC += 'a';
-    }
-    return fourCC;
-}
+    StandardWriters writerType = fdp.ConsumeEnum<StandardWriters>();
+    sp<MetaData> writerMeta = sp<MetaData>::make();
 
-typedef std::vector<std::function<void(FuzzedDataProvider*,
-                                    sp<MediaWriter>, sp<MetaData>, int tmpFileFd)>> OperationVec;
-typedef std::vector<std::function<void(FuzzedDataProvider*, MPEG4Writer*)>> MPEG4OperationVec;
-static const OperationVec operations = {
-    [](FuzzedDataProvider*, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->pause();
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
-        bool valid_fd = dataProvider->ConsumeBool();
-        int fd = -1;
-        if (valid_fd) {
-            fd = tmpFd;
-        }
-        // Args don't seem to be used
-        Vector<String16> args;
-        mediaWriter->dump(fd, args);
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int tmpFd) {
-        bool valid_fd = dataProvider->ConsumeBool();
-        int fd = -1;
-        if (valid_fd) {
-            fd = tmpFd;
-        }
-        mediaWriter->setNextFd(fd);
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setCaptureRate(dataProvider->ConsumeFloatingPoint<float>());
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setStartTimeOffsetMs(dataProvider->ConsumeIntegral<int>());
-
-        // Likely won't do much, but might as well as do a quick check
-        // while we're here.
-        mediaWriter->getStartTimeOffsetMs();
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
-    },
-    [](FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter, sp<MetaData>, int) {
-        mediaWriter->setMaxFileDuration(dataProvider->ConsumeIntegral<int64_t>());
-    },
-};
-
-static const MPEG4OperationVec mpeg4Operations = {
-    [](FuzzedDataProvider*, MPEG4Writer *mediaWriter) { mediaWriter->notifyApproachingLimit(); },
-    // Lower level write methods.
-    // High-level startBox/endBox/etc are all called elsewhere,
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint8_t val = dataProvider->ConsumeIntegral<uint8_t>();
-        mediaWriter->writeInt8(val);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint16_t val = dataProvider->ConsumeIntegral<uint16_t>();
-        mediaWriter->writeInt16(val);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint32_t val = dataProvider->ConsumeIntegral<uint32_t>();
-        mediaWriter->writeInt32(val);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint64_t val = dataProvider->ConsumeIntegral<uint64_t>();
-        mediaWriter->writeInt64(val);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        std::string strVal = dataProvider->ConsumeRandomLengthString(kMaxMPEGStrLen);
-        mediaWriter->writeCString(strVal.c_str());
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        std::string fourCC = getFourCC(dataProvider);
-        mediaWriter->writeFourcc(fourCC.c_str());
-    },
-
-    // Misc setters
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint32_t layers = dataProvider->ConsumeIntegral<uint32_t>();
-        mediaWriter->setTemporalLayerCount(layers);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        uint32_t duration = dataProvider->ConsumeIntegral<uint32_t>();
-        mediaWriter->setInterleaveDuration(duration);
-    },
-    [](FuzzedDataProvider *dataProvider, MPEG4Writer *mediaWriter) {
-        int lat = dataProvider->ConsumeIntegral<int>();
-        int lon = dataProvider->ConsumeIntegral<int>();
-        mediaWriter->setGeoData(lat, lon);
-    },
-};
-
-// Not all writers can always add new sources, so we'll need additional checks.
-void addSource(FuzzedDataProvider *dataProvider, sp<MediaWriter> mediaWriter) {
-    sp<MediaSource> mediaSource = genMediaSource(dataProvider, kMaxMediaBlobSize);
-    if (mediaSource == NULL) {
-        // There's a static check preventing NULLs in addSource.
-        return;
-    }
-    mediaWriter->addSource(mediaSource);
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
-    FuzzedDataProvider dataProvider(data, size);
-    TemporaryFile tf;
-    sp<MetaData> fileMeta = new MetaData;
-    StandardWriters writerType = dataProvider.ConsumeEnum<StandardWriters>();
-    sp<MediaWriter> writer = createWriter(tf.fd, writerType, fileMeta);
-
-    AttributionSourceState attributionSource;
-    attributionSource.packageName = dataProvider.ConsumeRandomLengthString(kMaxPackageNameLen);
-    attributionSource.uid = dataProvider.ConsumeIntegral<int32_t>();
-    attributionSource.pid = dataProvider.ConsumeIntegral<int32_t>();
-    attributionSource.token = sp<BBinder>::make();
-    sp<MediaRecorder> mr = new MediaRecorder(attributionSource);
-    writer->setListener(mr);
-
-    uint8_t baseOpLen = operations.size();
-    uint8_t totalLen = baseOpLen;
-    uint8_t maxSources;
-    // Different writers support different amounts of sources.
-    switch (writerType) {
-        case StandardWriters::AAC:
-        case StandardWriters::AAC_ADTS:
-        case StandardWriters::AMR_NB:
-        case StandardWriters::AMR_WB:
-        case StandardWriters::OGG:
-            maxSources = 1;
-            break;
-        case StandardWriters::WEBM:
-            maxSources = 2;
-            break;
-        default:
-            maxSources = UINT8_MAX;
-            break;
-    }
-    // Initialize some number of sources and add them to our writer.
-    uint8_t sourceCount = dataProvider.ConsumeIntegralInRange<uint8_t>(0, maxSources);
-    for (uint8_t i = 0; i < sourceCount; i++) {
-        addSource(&dataProvider, writer);
+    sp<MediaWriter> writer = createWriter(fd, writerType, writerMeta, &fdp);
+    if (writer == nullptr) {
+        close(fd);
+        return 0;
     }
 
-    // Increase our range if additional operations are implemented.
-    // Currently only MPEG4 has additiona public operations on their writer.
-    if (writerType == StandardWriters::MPEG4) {
-        totalLen += mpeg4Operations.size();
+    if (writerType == StandardWriters::WEBM) {
+        // This range is set to avoid a CHECK failure in WebmWriter::reset() -> EbmlVoid::EbmlVoid().
+        writer->setMaxFileSize(fdp.ConsumeIntegralInRange<int64_t>(5 * 1024 * 1024, INT64_MAX));
+    } else {
+        writer->setMaxFileSize(fdp.ConsumeIntegral<int64_t>());
     }
+    writer->setMaxFileDuration(fdp.ConsumeIntegral<int64_t>());
+    writer->setCaptureRate(fdp.ConsumeFloatingPoint<float>());
 
-    // Many operations require the writer to be started.
-    writer->start(fileMeta.get());
-    for (size_t ops_run = 0; dataProvider.remaining_bytes() > 0 && ops_run < kMaxOperations - 1;
-            ops_run++) {
-        uint8_t op = dataProvider.ConsumeIntegralInRange<uint8_t>(0, totalLen - 1);
-        if (op < baseOpLen) {
-            operations[op](&dataProvider, writer, fileMeta, tf.fd);
-        } else if (writerType == StandardWriters::MPEG4) {
-            mpeg4Operations[op - baseOpLen](&dataProvider, (MPEG4Writer*)writer.get());
-        } else {
-            // Here just in case, will error out.
-            operations[op](&dataProvider, writer, fileMeta, tf.fd);
-        }
-    }
+    sp<MediaSource> source = createSource(writerType, &fdp);
+    writer->addSource(source);
+    writer->start(writerMeta.get());
+    writer->pause();
     writer->stop();
 
-    writer.clear();
-    writer = nullptr;
+    close(fd);
+
     return 0;
 }
 }  // namespace android
diff --git a/media/libstagefright/timedtext/test/fuzzer/Android.bp b/media/libstagefright/timedtext/test/fuzzer/Android.bp
index 6590ebb..8724d51 100644
--- a/media/libstagefright/timedtext/test/fuzzer/Android.bp
+++ b/media/libstagefright/timedtext/test/fuzzer/Android.bp
@@ -48,8 +48,16 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "android-media-playback@google.com",
         ],
-        componentid: 155276,
+        componentid: 42195,
+        hotlists: [
+            "4593311",
+        ],
+        description: "This fuzzer targets the APIs of libstagefright_timedtext",
+        vector: "local_no_privileges_required",
+        service_privilege: "constrained",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
diff --git a/media/libstagefright/webm/WebmFrameThread.cpp b/media/libstagefright/webm/WebmFrameThread.cpp
index 7d1442b..e20a08d 100644
--- a/media/libstagefright/webm/WebmFrameThread.cpp
+++ b/media/libstagefright/webm/WebmFrameThread.cpp
@@ -354,6 +354,17 @@
         }
 
         MetaDataBase &md = buffer->meta_data();
+
+        if (mType == kVideoType) {
+            int32_t isCodecConfig = 0;
+            if (md.findInt32(kKeyIsCodecConfig, &isCodecConfig) && isCodecConfig) {
+                ALOGI("ignoring CSD for video track");
+                buffer->release();
+                buffer = NULL;
+                continue;
+            }
+        }
+
         CHECK(md.findInt64(kKeyTime, &timestampUs));
         if (mStartTimeUs == kUninitialized) {
             mStartTimeUs = timestampUs;
diff --git a/media/libstagefright/webm/WebmWriter.cpp b/media/libstagefright/webm/WebmWriter.cpp
index ca862b0..151ce7c 100644
--- a/media/libstagefright/webm/WebmWriter.cpp
+++ b/media/libstagefright/webm/WebmWriter.cpp
@@ -290,7 +290,7 @@
     // Max file duration limit is set
     if (mMaxFileDurationLimitUs != 0) {
         if (bitRate > 0) {
-            int64_t size2 = ((mMaxFileDurationLimitUs * bitRate * 6) / 1000 / 8000000);
+            int64_t size2 = ((mMaxFileDurationLimitUs / 1000) * bitRate * 6) / 8000000;
             if (mMaxFileSizeLimitBytes != 0 && mIsFileSizeLimitExplicitlyRequested) {
                 // When both file size and duration limits are set,
                 // we use the smaller limit of the two.
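Note: the reordered expression divides the duration down to milliseconds before multiplying by the bit rate, which keeps the intermediate product inside int64_t for much larger limits; the result differs from the old form only by sub-millisecond truncation. A small illustration (the sample values below are chosen for the example, not taken from the patch):

    #include <cstdint>

    // With durationUs = 1e12 (about 11.6 days) and bitRate = 1e8 (100 Mbps):
    //   durationUs * bitRate * 6          = 6e20  -> exceeds INT64_MAX (~9.22e18)
    //   (durationUs / 1000) * bitRate * 6 = 6e17  -> fits comfortably
    int64_t estimatedLimit(int64_t durationUs, int32_t bitRate) {
        return ((durationUs / 1000) * bitRate * 6) / 8000000;
    }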
diff --git a/media/libstagefright/writer_fuzzers/Android.bp b/media/libstagefright/writer_fuzzers/Android.bp
index 58aa7cd..840c6b3c 100644
--- a/media/libstagefright/writer_fuzzers/Android.bp
+++ b/media/libstagefright/writer_fuzzers/Android.bp
@@ -24,6 +24,7 @@
     // to get the below license kinds:
     //   SPDX-license-identifier-Apache-2.0
     default_applicable_licenses: ["frameworks_av_media_libstagefright_license"],
+    default_team: "trendy_team_android_media_solutions_editing",
 }
 
 cc_defaults {
@@ -35,14 +36,17 @@
         "include",
     ],
     static_libs: [
+        "com.android.media.flags.editing-aconfig-cc",
         "liblog",
-        "libstagefright_foundation",
         "libstagefright",
+        "libstagefright_foundation",
     ],
     shared_libs: [
+        "libaconfig_storage_read_api_cc",
         "libbinder",
         "libcutils",
         "libutils",
+        "server_configurable_flags",
     ],
 }
 
@@ -96,9 +100,9 @@
 }
 
 cc_fuzz {
-    name : "mpeg4_writer_fuzzer",
-    defaults : ["writer-fuzzer-defaults"],
-    srcs : [
+    name: "mpeg4_writer_fuzzer",
+    defaults: ["writer-fuzzer-defaults"],
+    srcs: [
         "mpeg4_writer_fuzzer.cpp",
     ],
     static_libs: [
@@ -107,9 +111,9 @@
 }
 
 cc_fuzz {
-    name : "ogg_writer_fuzzer",
-    defaults : ["writer-fuzzer-defaults"],
-    srcs : [
+    name: "ogg_writer_fuzzer",
+    defaults: ["writer-fuzzer-defaults"],
+    srcs: [
         "ogg_writer_fuzzer.cpp",
     ],
     static_libs: [
@@ -118,9 +122,9 @@
 }
 
 cc_fuzz {
-    name : "webm_writer_fuzzer",
-    defaults : ["writer-fuzzer-defaults"],
-    srcs : [
+    name: "webm_writer_fuzzer",
+    defaults: ["writer-fuzzer-defaults"],
+    srcs: [
         "webm_writer_fuzzer.cpp",
     ],
     static_libs: [
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index 2f204f9..2c5e81a 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -16,9 +16,6 @@
 cc_library_shared {
     name: "libstagefright_xmlparser",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     double_loadable: true,
 
     srcs: [
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index 8c1ef3b..bd11326 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -1069,7 +1069,7 @@
         codec.rank = rank;
     }
 
-    codec.variantSet = variants;
+    codec.variantSet.insert(variants.begin(), variants.end());
 
     // we allow sets of domains...
     for (const std::string &domain : domains) {
diff --git a/media/mediaserver/Android.bp b/media/mediaserver/Android.bp
index 6ea40e3..d916fd1 100644
--- a/media/mediaserver/Android.bp
+++ b/media/mediaserver/Android.bp
@@ -54,6 +54,8 @@
 mediaserver_cc_binary {
     name: "mediaserver",
 
+    defaults: ["libcodec2_hal_selection"],
+
     srcs: ["main_mediaserver.cpp"],
 
     shared_libs: [
@@ -61,6 +63,7 @@
         "libicu",
         "libfmq",
         "libbinder",
+        "libbinder_ndk",
         "libhidlbase",
         "liblog",
         "libmediaplayerservice",
diff --git a/media/mediaserver/main_mediaserver.cpp b/media/mediaserver/main_mediaserver.cpp
index 026847a..8a62f30 100644
--- a/media/mediaserver/main_mediaserver.cpp
+++ b/media/mediaserver/main_mediaserver.cpp
@@ -17,11 +17,12 @@
 
 #define LOG_TAG "mediaserver"
 //#define LOG_NDEBUG 0
-
+#include <android/binder_process.h>
 #include <binder/IPCThreadState.h>
 #include <binder/ProcessState.h>
 #include <binder/IServiceManager.h>
 #include <hidl/HidlTransportSupport.h>
+#include <codec2/common/HalSelection.h>
 #include <utils/Log.h>
 #include "RegisterExtensions.h"
 
@@ -30,6 +31,14 @@
 
 using namespace android;
 
+namespace {
+    constexpr int kCodecThreadPoolCount = 16;
+
+    // This is the default thread count for binder thread pool
+    // if the thread count is not configured.
+    constexpr int kDefaultBinderThreadPoolCount = 15;
+}; // anonymous
+
 int main(int argc __unused, char **argv __unused)
 {
     signal(SIGPIPE, SIG_IGN);
@@ -40,8 +49,14 @@
     MediaPlayerService::instantiate();
     ResourceManagerService::instantiate();
     registerExtensions();
-    ::android::hardware::configureRpcThreadpool(16, false);
+
+    bool aidl = ::android::IsCodec2AidlHalSelected();
+    if (!aidl) {
+        ::android::hardware::configureRpcThreadpool(kCodecThreadPoolCount, false);
+    } else {
+        ABinderProcess_setThreadPoolMaxThreadCount(
+                kCodecThreadPoolCount + kDefaultBinderThreadPoolCount);
+    }
     ProcessState::self()->startThreadPool();
     IPCThreadState::self()->joinThreadPool();
-    ::android::hardware::joinRpcThreadpool();
 }
diff --git a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
index cf880c2..fe3caf3 100644
--- a/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
+++ b/media/module/aidlpersistentsurface/aidl/android/media/IAidlNode.aidl
@@ -44,7 +44,7 @@
     void setInputSurface(IAidlBufferSource bufferSource);
     void submitBuffer(
             int buffer,
-            in HardwareBuffer hBuffer,
+            in @nullable HardwareBuffer hBuffer,
             int flags,
             long timestampUs,
             in @nullable ParcelFileDescriptor fence);
diff --git a/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
index 5526b10..a5c72d6 100644
--- a/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
+++ b/media/module/aidlpersistentsurface/wrapper/WAidlGraphicBufferSource.cpp
@@ -51,19 +51,24 @@
             int32_t bufferId, uint32_t flags,
             const sp<GraphicBuffer> &buffer,
             int64_t timestamp, int fenceFd) override {
-        AHardwareBuffer *ahwBuffer = nullptr;
-        ::aidl::android::hardware::HardwareBuffer hBuffer;
+        ::ndk::ScopedFileDescriptor fence(fenceFd);
         if (buffer.get()) {
-            ahwBuffer = AHardwareBuffer_from_GraphicBuffer(buffer.get());
+            ::aidl::android::hardware::HardwareBuffer hBuffer;
+            AHardwareBuffer *ahwBuffer = AHardwareBuffer_from_GraphicBuffer(buffer.get());
             AHardwareBuffer_acquire(ahwBuffer);
             hBuffer.reset(ahwBuffer);
-        }
 
-        ::ndk::ScopedFileDescriptor fence(fenceFd);
+            return fromAidlStatus(mNode->submitBuffer(
+                    bufferId,
+                    std::move(hBuffer),
+                    flags,
+                    timestamp,
+                    fence));
+        }
 
         return fromAidlStatus(mNode->submitBuffer(
               bufferId,
-              hBuffer,
+              {},
               flags,
               timestamp,
               fence));
diff --git a/media/module/bqhelper/Android.bp b/media/module/bqhelper/Android.bp
index c4dadd0..f9b7dea 100644
--- a/media/module/bqhelper/Android.bp
+++ b/media/module/bqhelper/Android.bp
@@ -69,9 +69,6 @@
     name: "libstagefright_bufferqueue_helper",
     defaults: ["libstagefright_bufferqueue-defaults"],
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     min_sdk_version: "29",
 
     shared_libs: [ "libgui" ],
diff --git a/media/module/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp
index 1d2562e..3d7f0c7 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/module/bufferpool/2.0/AccessorImpl.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "BufferPoolAccessor2.0"
 //#define LOG_NDEBUG 0
 
+#include <android-base/no_destructor.h>
+
 #include <sys/types.h>
 #include <stdint.h>
 #include <time.h>
@@ -147,7 +149,25 @@
 #endif
 
 static constexpr uint32_t kSeqIdMax = 0x7fffffff;
-uint32_t Accessor::Impl::sSeqId = time(nullptr) & kSeqIdMax;
+
+Accessor::Impl::ConnectionIdGenerator::ConnectionIdGenerator() {
+    mSeqId = static_cast<uint32_t>(time(nullptr) & kSeqIdMax);
+    mPid = static_cast<int32_t>(getpid());
+}
+
+ConnectionId Accessor::Impl::ConnectionIdGenerator::getConnectionId() {
+    uint32_t seq;
+    {
+        std::lock_guard<std::mutex> l(mLock);
+        seq = mSeqId;
+        if (mSeqId == kSeqIdMax) {
+            mSeqId = 0;
+        } else {
+            ++mSeqId;
+        }
+    }
+    return (int64_t)mPid << 32 | seq | kSeqIdVndkBit;
+}
 
 Accessor::Impl::Impl(
         const std::shared_ptr<BufferPoolAllocator> &allocator)
@@ -163,13 +183,14 @@
         uint32_t *pMsgId,
         const StatusDescriptor** statusDescPtr,
         const InvalidationDescriptor** invDescPtr) {
+    static ::android::base::NoDestructor<ConnectionIdGenerator> sConIdGenerator;
     sp<Connection> newConnection = new Connection();
     ResultStatus status = ResultStatus::CRITICAL_ERROR;
     {
         std::lock_guard<std::mutex> lock(mBufferPool.mMutex);
         if (newConnection) {
             int32_t pid = getpid();
-            ConnectionId id = (int64_t)pid << 32 | sSeqId | kSeqIdVndkBit;
+            ConnectionId id = sConIdGenerator->getConnectionId();
             status = mBufferPool.mObserver.open(id, statusDescPtr);
             if (status == ResultStatus::OK) {
                 newConnection->initialize(accessor, id);
@@ -179,11 +200,6 @@
                 mBufferPool.mConnectionIds.insert(id);
                 mBufferPool.mInvalidationChannel.getDesc(invDescPtr);
                 mBufferPool.mInvalidation.onConnect(id, observer);
-                if (sSeqId == kSeqIdMax) {
-                   sSeqId = 0;
-                } else {
-                    ++sSeqId;
-                }
             }
 
         }
@@ -609,7 +625,7 @@
         }
         if (ret == false) {
             ALOGW("buffer status message processing failure - message : %d connection : %lld",
-                  message.newStatus, (long long)message.connectionId);
+                  (int)message.newStatus, (long long)message.connectionId);
         }
     }
     messages.clear();
@@ -907,7 +923,7 @@
         std::map<const std::weak_ptr<Accessor::Impl>, nsecs_t, std::owner_less<>> &accessors,
         std::mutex &mutex,
         std::condition_variable &cv) {
-    std::list<const std::weak_ptr<Accessor::Impl>> evictList;
+    std::list<std::weak_ptr<Accessor::Impl>> evictList;
     while (true) {
         int expired = 0;
         int evicted = 0;
diff --git a/media/module/bufferpool/2.0/AccessorImpl.h b/media/module/bufferpool/2.0/AccessorImpl.h
index 3d39941..2366177 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.h
+++ b/media/module/bufferpool/2.0/AccessorImpl.h
@@ -77,7 +77,14 @@
 private:
     // ConnectionId = pid : (timestamp_created + seqId)
     // in order to guarantee uniqueness for each connection
-    static uint32_t sSeqId;
+    struct ConnectionIdGenerator {
+        int32_t mPid;
+        uint32_t mSeqId;
+        std::mutex mLock;
+
+        ConnectionIdGenerator();
+        ConnectionId getConnectionId();
+    };
 
     const std::shared_ptr<BufferPoolAllocator> mAllocator;
 
diff --git a/media/module/bufferpool/2.0/Android.bp b/media/module/bufferpool/2.0/Android.bp
index 930b026..c40603c 100644
--- a/media/module/bufferpool/2.0/Android.bp
+++ b/media/module/bufferpool/2.0/Android.bp
@@ -21,6 +21,9 @@
     export_include_dirs: [
         "include",
     ],
+    header_libs: [
+        "libbase_headers",
+    ],
     shared_libs: [
         "libcutils",
         "libfmq",
@@ -60,7 +63,4 @@
     vendor_available: true,
     // TODO: b/147147992
     double_loadable: true,
-    vndk: {
-        enabled: true,
-    },
 }
diff --git a/media/module/bufferpool/2.0/BufferPoolClient.cpp b/media/module/bufferpool/2.0/BufferPoolClient.cpp
index cda23ff..66d11fa 100644
--- a/media/module/bufferpool/2.0/BufferPoolClient.cpp
+++ b/media/module/bufferpool/2.0/BufferPoolClient.cpp
@@ -762,6 +762,10 @@
     } else {
         connection = mRemoteConnection;
     }
+    if (!connection) {
+        ALOGE("connection null: fetchBufferHandle()");
+        return ResultStatus::CRITICAL_ERROR;
+    }
     ResultStatus status;
     Return<void> transResult = connection->fetch(
             transactionId, bufferId,
diff --git a/media/module/codecs/amrnb/common/include/abs_s.h b/media/module/codecs/amrnb/common/include/abs_s.h
deleted file mode 100644
index e92eaf4..0000000
--- a/media/module/codecs/amrnb/common/include/abs_s.h
+++ /dev/null
@@ -1,113 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Pathname: ./gsm-amr/c/include/abs_s.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for abs_s function.
-
- Description: Updated template to make it build for Symbian.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the abs_s function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef ABS_S_H
-#define ABS_S_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word16 abs_s(Word16 var1);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif  /* ABS_S_H */
-
-
diff --git a/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h b/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
index 8f0867a..8817621 100644
--- a/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
+++ b/media/module/codecs/amrnb/common/include/basic_op_c_equivalent.h
@@ -120,15 +120,11 @@
     {
         Word32 L_sum;
 
-        L_sum = L_var1 + L_var2;
-
-        if ((L_var1 ^ L_var2) >= 0)
+        if (__builtin_add_overflow(L_var1, L_var2, &L_sum))
         {
-            if ((L_sum ^ L_var1) < 0)
-            {
-                L_sum = (L_var1 < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
+            // saturating...
+            L_sum = (L_var1 < 0) ? MIN_32 : MAX_32;
+            *pOverflow = 1;
         }
 
         return (L_sum);
@@ -160,15 +156,11 @@
     {
         Word32 L_diff;
 
-        L_diff = L_var1 - L_var2;
-
-        if ((L_var1 ^ L_var2) < 0)
+        if (__builtin_sub_overflow(L_var1, L_var2, &L_diff))
         {
-            if ((L_diff ^ L_var1) & MIN_32)
-            {
-                L_diff = (L_var1 < 0L) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
+            // saturating...
+            L_diff = (L_var1 < 0L) ? MIN_32 : MAX_32;
+            *pOverflow = 1;
         }
 
         return (L_diff);
@@ -204,16 +196,12 @@
         result = (Word32) var1 * var2;
         if (result != (Word32) 0x40000000L)
         {
-            L_sum = (result << 1) + L_var3;
-
             /* Check if L_sum and L_var_3 share the same sign */
-            if ((L_var3 ^ result) > 0)
+            if (__builtin_add_overflow((result << 1), L_var3, &L_sum))
             {
-                if ((L_sum ^ L_var3) < 0)
-                {
-                    L_sum = (L_var3 < 0) ? MIN_32 : MAX_32;
-                    *pOverflow = 1;
-                }
+                // saturating...
+                L_sum = (L_var3 < 0) ? MIN_32 : MAX_32;
+                *pOverflow = 1;
             }
         }
         else
@@ -345,14 +333,10 @@
         product32 = ((Word32) L_var1_hi * L_var2_lo) >> 15;
 
         /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
+        if (__builtin_add_overflow(L_product, (product32 << 1), &L_sum))
         {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
+            // saturating...
+            L_sum = (L_product < 0) ? MIN_32 : MAX_32;
         }
 
         L_product = L_sum;
@@ -361,14 +345,10 @@
         product32 = ((Word32) L_var1_lo * L_var2_hi) >> 15;
 
         /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
+        if (__builtin_add_overflow(L_product, (product32 << 1), &L_sum))
         {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
+            // saturating...
+            L_sum = (L_product < 0) ? MIN_32 : MAX_32;
         }
         return (L_sum);
     }
@@ -416,15 +396,11 @@
 
         result = ((Word32)L_var1_lo * var2) >> 15;
 
-        L_sum  =  L_product + (result << 1);
-
-        if ((L_product ^ result) > 0)
+        if (__builtin_add_overflow(L_product, (result << 1), &L_sum))
         {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
+            // saturating...
+            L_sum = (L_product < 0) ? MIN_32 : MAX_32;
+            *pOverflow = 1;
         }
         return (L_sum);
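Note: these rewrites replace the sign-comparison overflow tests with __builtin_add_overflow/__builtin_sub_overflow, which store the wrapped result and report signed overflow without relying on undefined behavior; the saturation value is then chosen from the sign of the first operand. A minimal standalone version of the same saturating-add idiom (not the codec's L_add itself):

    #include <cstdint>
    #include <limits>

    // Saturating 32-bit add using the GCC/Clang overflow builtins.
    // The caller initializes *overflowed; it is only set on saturation.
    static inline int32_t sat_add32(int32_t a, int32_t b, bool* overflowed) {
        int32_t sum;
        if (__builtin_add_overflow(a, b, &sum)) {
            // Signed overflow only happens when both operands share a sign,
            // so either operand's sign selects the saturation bound.
            sum = (a < 0) ? std::numeric_limits<int32_t>::min()
                          : std::numeric_limits<int32_t>::max();
            *overflowed = true;
        }
        return sum;
    }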
 
diff --git a/media/module/codecs/amrnb/common/include/l_add.h b/media/module/codecs/amrnb/common/include/l_add.h
deleted file mode 100644
index 136b914..0000000
--- a/media/module/codecs/amrnb/common/include/l_add.h
+++ /dev/null
@@ -1,171 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_add.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_add function.
-
- Description: Changed function prototype declaration. A pointer to the overflow
-              flag is being passed in as a parameter instead of using global
-              data.
-
- Description: Updated template. Changed paramter name from overflow to
-              pOverflow
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_add function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_ADD_H
-#define L_ADD_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
-    {
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-        __asm
-        {
-            QADD result, L_var1, L_var2
-        }
-        return(result);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var1;
-        register Word32 rb = L_var2;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("qadd %0, %1, %2"
-             : "=r"(result)
-                             : "r"(ra), "r"(rb)
-                            );
-        return (result);
-
-    }
-
-#else /* C EQUIVALENT */
-
-
-    static inline Word32 L_add(register Word32 L_var1, register Word32 L_var2, Flag *pOverflow)
-    {
-        Word32 L_sum;
-
-        L_sum = L_var1 + L_var2;
-
-        if ((L_var1 ^ L_var2) >= 0)
-        {
-            if ((L_sum ^ L_var1) < 0)
-            {
-                L_sum = (L_var1 < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
-        }
-
-        return (L_sum);
-    }
-
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_ADD_H_ */
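The C-equivalent branch of the deleted l_add.h detects overflow by inspecting signs before and after the wrapping add. As a side note, a portable rewrite could get the same saturating behaviour from the __builtin_add_overflow builtin available in GCC and Clang; the sketch below is such an alternative, not the deleted code, and the function name is made up for illustration.

#include <stdint.h>

/* Alternative saturating add (a sketch, not the deleted l_add.h code):
 * __builtin_add_overflow performs the add and reports signed overflow
 * without undefined behaviour. On overflow the two operands necessarily
 * share a sign, so clamping by the first operand's sign matches the
 * basic-op semantics.
 * Example: l_add_portable(INT32_MAX, 1, &ov) yields INT32_MAX with ov set. */
static int32_t l_add_portable(int32_t a, int32_t b, int *overflow)
{
    int32_t sum;
    if (__builtin_add_overflow(a, b, &sum)) {
        *overflow = 1;
        return (a < 0) ? INT32_MIN : INT32_MAX;
    }
    return sum;
}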
diff --git a/media/module/codecs/amrnb/common/include/l_add_c.h b/media/module/codecs/amrnb/common/include/l_add_c.h
deleted file mode 100644
index 3585a3c..0000000
--- a/media/module/codecs/amrnb/common/include/l_add_c.h
+++ /dev/null
@@ -1,115 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_add_c.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_add_c function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag and carry flag is passed into the
-              function. Updated template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_add_c function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_ADD_C_H
-#define L_ADD_C_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word32 L_add_c(Word32 L_var1, Word32 L_var2, Flag *pOverflow, Flag *pCarry);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_ADD_C_H_ */
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_mac.h b/media/module/codecs/amrnb/common/include/l_mac.h
deleted file mode 100644
index b4af3aa..0000000
--- a/media/module/codecs/amrnb/common/include/l_mac.h
+++ /dev/null
@@ -1,183 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
- Filename: /audio/gsm_amr/c/include/l_mac.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_mac function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: 1. Updated the function to include ARM and Linux-ARM assembly
-                 instructions.
-              2. Added OSCL_UNUSED_ARG(pOverflow) to remove compiler warnings.
-
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_mac function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_MAC_H
-#define L_MAC_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_mac(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-        Word32 L_sum;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm {SMULBB result, var1, var2}
-        __asm {QDADD L_sum, L_var3, result}
-        return (L_sum);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    static inline Word32 L_mac(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var3;
-        register Word32 rb = var1;
-        register Word32 rc = var2;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(result)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(rc)
-                             : "r"(ra), "r"(result)
-                            );
-
-        return (rc);
-    }
-
-#else /* C_EQUIVALENT */
-
-    __inline Word32 L_mac(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-        Word32 L_sum;
-        result = (Word32) var1 * var2;
-        if (result != (Word32) 0x40000000L)
-        {
-            L_sum = (result << 1) + L_var3;
-
-            /* Check if L_sum and L_var_3 share the same sign */
-            if ((L_var3 ^ result) > 0)
-            {
-                if ((L_sum ^ L_var3) < 0)
-                {
-                    L_sum = (L_var3 < 0) ? MIN_32 : MAX_32;
-                    *pOverflow = 1;
-                }
-            }
-        }
-        else
-        {
-            *pOverflow = 1;
-            L_sum = MAX_32;
-        }
-        return (L_sum);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_MAC_H_ */
-
-
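The deleted l_mac.h implements a Q15 x Q15 -> Q31 multiply-accumulate: the 16-bit product is doubled to renormalise from Q30 to Q31, added to the accumulator, and saturated. A small, self-contained sketch with one worked value may help; it mirrors the C-equivalent path shown above (apart from using unsigned arithmetic to avoid signed wrap), with Word32/Word16 assumed to map to int32_t/int16_t and an illustrative helper name.

#include <stdint.h>
#include <stdio.h>

/* Sketch of the C-equivalent L_mac above (illustrative name, assumed types). */
static int32_t mac_sketch(int32_t acc, int16_t x, int16_t y, int *overflow)
{
    int32_t product = (int32_t)x * y;       /* Q15 * Q15 -> Q30 */
    if (product == 0x40000000L) {           /* only hit for x == y == -32768 */
        *overflow = 1;
        return INT32_MAX;
    }
    /* Double the product (Q30 -> Q31) and accumulate; unsigned add avoids UB. */
    int32_t sum = (int32_t)((uint32_t)(2 * product) + (uint32_t)acc);
    /* Same-sign test as in the original C path; saturate if the sum flipped sign. */
    if (((acc ^ product) > 0) && ((sum ^ acc) < 0)) {
        sum = (acc < 0) ? INT32_MIN : INT32_MAX;
        *overflow = 1;
    }
    return sum;
}

int main(void)
{
    int ov = 0;
    /* 0.5 * 0.5 in Q15 (16384 * 16384) accumulates to 0x20000000, 0.25 in Q31. */
    printf("0x%08x\n", (unsigned)mac_sketch(0, 16384, 16384, &ov));
    return 0;
}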
diff --git a/media/module/codecs/amrnb/common/include/l_msu.h b/media/module/codecs/amrnb/common/include/l_msu.h
deleted file mode 100644
index 3bafb00..0000000
--- a/media/module/codecs/amrnb/common/include/l_msu.h
+++ /dev/null
@@ -1,171 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
- Filename: /audio/gsm_amr/c/include/l_msu.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_msu function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_msu function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_MSU_H
-#define L_MSU_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-#include    "l_mult.h"
-#include    "l_sub.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_msu(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 product;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            SMULBB product, var1, var2
-            QDSUB  result, L_var3, product
-        }
-
-        return (result);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_msu(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var3;
-        register Word32 rb = var1;
-        register Word32 rc = var2;
-        Word32 product;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("qdsub %0, %1, %2"
-             : "=r"(result)
-                             : "r"(ra), "r"(product)
-                            );
-
-        return (result);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word32 L_msu(Word32 L_var3, Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-
-        result = L_mult(var1, var2, pOverflow);
-        result = L_sub(L_var3, result, pOverflow);
-
-        return (result);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_MSU_H_ */
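L_msu in the deleted header is the multiply-subtract counterpart: in the C path it is simply L_sub(acc, L_mult(x, y)), that is, acc minus twice the Q15 product, saturated. The sketch below expresses that arithmetic meaning with a 64-bit intermediate; it is illustrative only (names included) and not guaranteed bit-exact with the composed L_mult/L_sub path in the x == y == -32768 corner case, where L_mult saturates first.

#include <stdint.h>
#include <stdio.h>

/* Reference-style view of L_msu's arithmetic: acc minus twice the Q15 product,
 * clamped to 32 bits. Computed with a 64-bit intermediate purely for clarity. */
static int32_t msu_reference(int32_t acc, int16_t x, int16_t y)
{
    int64_t v = (int64_t)acc - 2 * (int64_t)x * y;
    if (v > INT32_MAX) return INT32_MAX;
    if (v < INT32_MIN) return INT32_MIN;
    return (int32_t)v;
}

int main(void)
{
    /* 0.25 in Q31 minus (0.5 * 0.5 from Q15 inputs) is exactly 0. */
    printf("%ld\n", (long)msu_reference(0x20000000, 16384, 16384));
    return 0;
}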
diff --git a/media/module/codecs/amrnb/common/include/l_mult.h b/media/module/codecs/amrnb/common/include/l_mult.h
deleted file mode 100644
index 061df60..0000000
--- a/media/module/codecs/amrnb/common/include/l_mult.h
+++ /dev/null
@@ -1,178 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_mult.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_mult function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_mult function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_MULT_H
-#define L_MULT_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 result;
-        Word32 product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            SMULBB product, var1, var2
-            QADD   result, product, product
-        }
-
-        return (result);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = var1;
-        register Word32 rb = var2;
-        Word32 result;
-        Word32 product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product)
-                             : "r"(ra), "r"(rb)
-                            );
-
-        asm volatile("qadd %0, %1, %2"
-             : "=r"(result)
-                             : "r"(product), "r"(product)
-                            );
-
-        return(result);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word32 L_mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 L_product;
-
-        L_product = (Word32) var1 * var2;
-
-        if (L_product != (Word32) 0x40000000L)
-        {
-            L_product <<= 1;          /* Multiply by 2 */
-        }
-        else
-        {
-            *pOverflow = 1;
-            L_product = MAX_32;
-        }
-
-        return (L_product);
-    }
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_MULT_H */
-
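L_mult, deleted above, is the plain Q15 fractional multiply: double the 16 x 16 product to move from Q30 to Q31. The only input pair that can overflow is (-32768, -32768), whose raw product is 0x40000000, which is why the C-equivalent special-cases exactly that value. A minimal sketch, with an illustrative name and int types assumed to match the codec's Word16/Word32:

#include <stdint.h>
#include <stdio.h>

/* Sketch of the Q15 fractional multiply above: double the 16x16 product to
 * renormalise Q30 -> Q31. Only (-32768, -32768) can overflow, so that raw
 * product (0x40000000) is mapped straight to INT32_MAX. */
static int32_t mult_sketch(int16_t x, int16_t y, int *overflow)
{
    int32_t p = (int32_t)x * y;
    if (p == 0x40000000L) {
        *overflow = 1;
        return INT32_MAX;
    }
    return 2 * p;
}

int main(void)
{
    int ov = 0;
    printf("0x%08x\n", (unsigned)mult_sketch(-32768, -32768, &ov)); /* 0x7fffffff, ov == 1 */
    return 0;
}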
diff --git a/media/module/codecs/amrnb/common/include/l_shl.h b/media/module/codecs/amrnb/common/include/l_shl.h
deleted file mode 100644
index 7b9fdb1..0000000
--- a/media/module/codecs/amrnb/common/include/l_shl.h
+++ /dev/null
@@ -1,116 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_shl.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_shl function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_shl function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_SHL_H
-#define L_SHL_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word32 L_shl(Word32 L_var1, Word16 var2, Flag *pOverflow);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_SHL_H_ */
-
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_shr.h b/media/module/codecs/amrnb/common/include/l_shr.h
deleted file mode 100644
index ef22073..0000000
--- a/media/module/codecs/amrnb/common/include/l_shr.h
+++ /dev/null
@@ -1,115 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
- Filename: /audio/gsm_amr/c/include/l_shr.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_shr function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_shr function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_SHR_H
-#define L_SHR_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-    Word32 L_shr(Word32 L_var1, Word16 var2, Flag *pOverflow);
-
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_SHR_H_ */
-
-
-
diff --git a/media/module/codecs/amrnb/common/include/l_sub.h b/media/module/codecs/amrnb/common/include/l_sub.h
deleted file mode 100644
index 97d7538..0000000
--- a/media/module/codecs/amrnb/common/include/l_sub.h
+++ /dev/null
@@ -1,173 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/l_sub.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for L_sub function.
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the L_sub function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef L_SUB_H
-#define L_SUB_H
-
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 L_sub(Word32 L_var1, Word32 L_var2, Flag *pOverflow)
-    {
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            QSUB result, L_var1, L_var2
-        }
-
-        return(result);
-
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    __inline Word32 L_sub(Word32 L_var1, Word32 L_var2, Flag *pOverflow)
-    {
-        register Word32 ra = L_var1;
-        register Word32 rb = L_var2;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("qsub %0, %1, %2"
-             : "=r"(result)
-                             : "r"(ra), "r"(rb)
-                            );
-
-        return (result);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word32 L_sub(register Word32 L_var1, register Word32 L_var2,
-                               register Flag *pOverflow)
-    {
-        Word32 L_diff;
-
-        L_diff = L_var1 - L_var2;
-
-        if ((L_var1 ^ L_var2) < 0)
-        {
-            if ((L_diff ^ L_var1) & MIN_32)
-            {
-                L_diff = (L_var1 < 0L) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
-        }
-
-        return (L_diff);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _L_SUB_H_ */
-
-
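The C-equivalent L_sub above mirrors L_add: overflow is only possible when the operands have opposite signs, and it is detected by the difference taking the opposite sign of the minuend (the original's '& MIN_32' form isolates the sign bit). Below is a stand-alone sketch of the same test, using an ordinary '< 0' sign check and unsigned arithmetic to keep the wrap well defined; the helper name is illustrative.

#include <stdint.h>
#include <stdio.h>

/* Sketch of the C-equivalent L_sub above: saturate when the operands have
 * opposite signs and the wrapped difference flips the minuend's sign. The
 * '(diff ^ a) < 0' check is equivalent to the original's sign-bit masking. */
static int32_t sub_sketch(int32_t a, int32_t b, int *overflow)
{
    int32_t diff = (int32_t)((uint32_t)a - (uint32_t)b);
    if (((a ^ b) < 0) && ((diff ^ a) < 0)) {
        diff = (a < 0) ? INT32_MIN : INT32_MAX;
        *overflow = 1;
    }
    return diff;
}

int main(void)
{
    int ov = 0;
    printf("%ld\n", (long)sub_sketch(INT32_MIN, 1, &ov)); /* clamps to INT32_MIN, ov == 1 */
    return 0;
}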
diff --git a/media/module/codecs/amrnb/common/include/mpy_32.h b/media/module/codecs/amrnb/common/include/mpy_32.h
deleted file mode 100644
index 03f36b2..0000000
--- a/media/module/codecs/amrnb/common/include/mpy_32.h
+++ /dev/null
@@ -1,272 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/mpy_32.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Updated function prototype declaration to reflect new interface.
-              A pointer to overflow flag is passed into the function. Updated
-              template.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Updated the function to include ARM and Linux-ARM assembly
-              instructions.
-
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the Mpy_32 function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef MPY_32_H
-#define MPY_32_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 Mpy_32(Word16 L_var1_hi,
-    Word16 L_var1_lo,
-    Word16 L_var2_hi,
-    Word16 L_var2_lo,
-    Flag   *pOverflow)
-
-    {
-        /*----------------------------------------------------------------------------
-        ; Define all local variables
-        ----------------------------------------------------------------------------*/
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 product32;
-
-        OSCL_UNUSED_ARG(pOverflow);
-        /*----------------------------------------------------------------------------
-        ; Function body here
-        ----------------------------------------------------------------------------*/
-        /* L_product = L_mult (L_var1_hi, L_var2_hi, pOverflow);*/
-
-        __asm {SMULBB L_product, L_var1_hi, L_var2_hi}
-        __asm {QDADD L_product, 0, L_product}
-        __asm {SMULBB product32, L_var1_hi, L_var2_lo}
-        product32 >>= 15;
-        __asm {QDADD L_sum, L_product, product32}
-        L_product = L_sum;
-        __asm {SMULBB product32, L_var1_lo, L_var2_hi}
-        product32 >>= 15;
-        __asm {QDADD L_sum, L_product, product32}
-        return (L_sum);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    static inline Word32 Mpy_32(Word16 L_var1_hi,
-                                Word16 L_var1_lo,
-                                Word16 L_var2_hi,
-                                Word16 L_var2_lo,
-                                Flag   *pOverflow)
-    {
-        register Word32 product32;
-        register Word32 L_sum;
-        register Word32 L_product, result;
-        register Word32 ra = L_var1_hi;
-        register Word32 rb = L_var1_lo;
-        register Word32 rc = L_var2_hi;
-        register Word32 rd = L_var2_lo;
-
-
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(ra), "r"(rc)
-                            );
-        asm volatile("mov %0, #0"
-             : "=r"(result)
-                    );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_sum)
-                             : "r"(result), "r"(L_product)
-                            );
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product32)
-                             : "r"(ra), "r"(rd)
-                            );
-
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(ra)
-                             : "r"(product32)
-                            );
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(L_sum), "r"(ra)
-                            );
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product32)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(rb)
-                             : "r"(product32)
-                            );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_sum)
-                             : "r"(L_product), "r"(rb)
-                            );
-
-        return (L_sum);
-    }
-
-#else /* C_EQUIVALENT */
-
-    __inline Word32 Mpy_32(Word16 L_var1_hi,
-                           Word16 L_var1_lo,
-                           Word16 L_var2_hi,
-                           Word16 L_var2_lo,
-                           Flag   *pOverflow)
-    {
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 product32;
-
-        OSCL_UNUSED_ARG(pOverflow);
-        L_product = (Word32) L_var1_hi * L_var2_hi;
-
-        if (L_product != (Word32) 0x40000000L)
-        {
-            L_product <<= 1;
-        }
-        else
-        {
-            L_product = MAX_32;
-        }
-
-        /* result = mult (L_var1_hi, L_var2_lo, pOverflow); */
-        product32 = ((Word32) L_var1_hi * L_var2_lo) >> 15;
-
-        /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
-        {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
-        }
-
-        L_product = L_sum;
-
-        /* result = mult (L_var1_lo, L_var2_hi, pOverflow); */
-        product32 = ((Word32) L_var1_lo * L_var2_hi) >> 15;
-
-        /* L_product = L_mac (L_product, result, 1, pOverflow); */
-        L_sum = L_product + (product32 << 1);
-
-        if ((L_product ^ product32) > 0)
-        {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-            }
-        }
-
-        /*----------------------------------------------------------------------------
-        ; Return nothing or data or data pointer
-        ----------------------------------------------------------------------------*/
-        return (L_sum);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _MPY_32_H_ */
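Mpy_32, deleted above, approximates a Q31 x Q31 product from 16-bit halves, summing the hi*hi term and the two scaled cross terms while dropping lo*lo. The split into halves is not shown in this diff; the sketch below assumes the usual basic-op convention (hi = x >> 16, lo = (x >> 1) & 0x7fff, so x is roughly hi*2^16 + lo*2) and compares the half-based estimate against an exact 64-bit reference for one pair of values. The names and the decomposition convention are assumptions, not code from this change.

#include <stdint.h>
#include <stdio.h>

/* Assumed hi/lo split in the usual basic-op style (not shown in this diff). */
static void split_hi_lo(int32_t x, int16_t *hi, int16_t *lo)
{
    *hi = (int16_t)(x >> 16);
    *lo = (int16_t)((x >> 1) & 0x7fff);
}

int main(void)
{
    int32_t x = 0x40000000, y = 0x20000000;     /* 0.5 and 0.25 in Q31 */
    int16_t xh, xl, yh, yl;
    split_hi_lo(x, &xh, &xl);
    split_hi_lo(y, &yh, &yl);

    /* Approximate (x*y) >> 31 from the halves, as the C-equivalent Mpy_32
     * does (lo*lo dropped, saturation checks omitted here). */
    int32_t approx = 2 * ((int32_t)xh * yh)
                   + 2 * (((int32_t)xh * yl) >> 15)
                   + 2 * (((int32_t)xl * yh) >> 15);
    int32_t exact  = (int32_t)(((int64_t)x * y) >> 31);

    printf("approx=0x%08x exact=0x%08x\n", (unsigned)approx, (unsigned)exact);
    return 0;
}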
diff --git a/media/module/codecs/amrnb/common/include/mpy_32_16.h b/media/module/codecs/amrnb/common/include/mpy_32_16.h
deleted file mode 100644
index 7eaa741..0000000
--- a/media/module/codecs/amrnb/common/include/mpy_32_16.h
+++ /dev/null
@@ -1,206 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
-  Filename: /audio/gsm_amr/c/include/mpy_32_16.h
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the Mpy_32_16 function.
-
-------------------------------------------------------------------------------
-*/
-
-/*----------------------------------------------------------------------------
-; CONTINUE ONLY IF NOT ALREADY DEFINED
-----------------------------------------------------------------------------*/
-#ifndef MPY_32_16_H
-#define MPY_32_16_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5) /* Instructions for ARM Assembly on ADS*/
-
-    __inline Word32 Mpy_32_16(Word16 L_var1_hi,
-    Word16 L_var1_lo,
-    Word16 var2,
-    Flag *pOverflow)
-    {
-
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 result;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm {SMULBB L_product, L_var1_hi, var2}
-        __asm {QDADD L_product, 0, L_product}
-        __asm {SMULBB result, L_var1_lo, var2}
-        result >>= 15;
-        __asm {QDADD L_sum, L_product, result}
-        return (L_sum);
-    }
-
-#elif defined(PV_ARM_GCC_V5) /* Instructions for ARM-linux cross-compiler*/
-
-    static inline Word32 Mpy_32_16(Word16 L_var1_hi,
-                                   Word16 L_var1_lo,
-                                   Word16 var2,
-                                   Flag *pOverflow)
-    {
-
-        register Word32 ra = L_var1_hi;
-        register Word32 rb = L_var1_lo;
-        register Word32 rc = var2;
-        Word32 result, L_product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(ra), "r"(rc)
-                            );
-        asm volatile("mov %0, #0"
-             : "=r"(result)
-                    );
-
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(L_product)
-                             : "r"(result), "r"(L_product)
-                            );
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(result)
-                             : "r"(rb), "r"(rc)
-                            );
-
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(ra)
-                             : "r"(result)
-                            );
-        asm volatile("qdadd %0, %1, %2"
-             : "=r"(result)
-                             : "r"(L_product), "r"(ra)
-                            );
-
-        return (result);
-    }
-
-#else /* C_EQUIVALENT */
-    __inline Word32 Mpy_32_16(Word16 L_var1_hi,
-                              Word16 L_var1_lo,
-                              Word16 var2,
-                              Flag *pOverflow)
-    {
-
-        Word32 L_product;
-        Word32 L_sum;
-        Word32 result;
-        L_product = (Word32) L_var1_hi * var2;
-
-        if (L_product != (Word32) 0x40000000L)
-        {
-            L_product <<= 1;
-        }
-        else
-        {
-            *pOverflow = 1;
-            L_product = MAX_32;
-        }
-
-        result = ((Word32)L_var1_lo * var2) >> 15;
-
-        L_sum  =  L_product + (result << 1);
-
-        if ((L_product ^ result) > 0)
-        {
-            if ((L_sum ^ L_product) < 0)
-            {
-                L_sum = (L_product < 0) ? MIN_32 : MAX_32;
-                *pOverflow = 1;
-            }
-        }
-        return (L_sum);
-
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _MPY_32_16_H_ */
-
-
diff --git a/media/module/codecs/amrnb/common/include/mult.h b/media/module/codecs/amrnb/common/include/mult.h
deleted file mode 100644
index 6927eba..0000000
--- a/media/module/codecs/amrnb/common/include/mult.h
+++ /dev/null
@@ -1,190 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/*
-
- Filename: /audio/gsm_amr/c/include/mult.h
-
-------------------------------------------------------------------------------
- REVISION HISTORY
-
- Description: Created separate header file for mult function.
-
- Description: Changed prototype of the mult() function. Instead of using global
-              a pointer to overflow flag is now passed into the function.
-
- Description: Updated copyright information.
-              Updated variable name from "overflow" to "pOverflow" to match
-              with original function declaration.
-
- Description: Moved _cplusplus #ifdef after Include section.
-
- Description: Providing support for ARM and Linux-ARM assembly instructions.
-
- Who:                       Date:
- Description:
-
-------------------------------------------------------------------------------
- INCLUDE DESCRIPTION
-
- This file contains all the constant definitions and prototype definitions
- needed by the mult function.
-
-------------------------------------------------------------------------------
-*/
-
-#ifndef MULT_H
-#define MULT_H
-
-/*----------------------------------------------------------------------------
-; INCLUDES
-----------------------------------------------------------------------------*/
-
-#include    "basicop_malloc.h"
-
-/*--------------------------------------------------------------------------*/
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-
-    /*----------------------------------------------------------------------------
-    ; MACROS
-    ; Define module specific macros here
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; DEFINES
-    ; Include all pre-processor statements here.
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; EXTERNAL VARIABLES REFERENCES
-    ; Declare variables used in this module but defined elsewhere
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; SIMPLE TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; ENUMERATED TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; STRUCTURES TYPEDEF'S
-    ----------------------------------------------------------------------------*/
-
-    /*----------------------------------------------------------------------------
-    ; GLOBAL FUNCTION DEFINITIONS
-    ; Function Prototype declaration
-    ----------------------------------------------------------------------------*/
-#if defined(PV_ARM_V5)
-
-    __inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        Word32 product;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        __asm
-        {
-            SMULBB product, var1, var2
-            MOV    product, product, ASR #15
-            CMP    product, 0x7FFF
-            MOVGE  product, 0x7FFF
-        }
-
-        return ((Word16) product);
-    }
-
-#elif defined(PV_ARM_GCC_V5)
-
-    __inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 ra = var1;
-        register Word32 rb = var2;
-        Word32 product;
-        Word32 temp = 0x7FFF;
-
-        OSCL_UNUSED_ARG(pOverflow);
-
-        asm volatile("smulbb %0, %1, %2"
-             : "=r"(product)
-                             : "r"(ra), "r"(rb)
-                            );
-        asm volatile("mov %0, %1, ASR #15"
-             : "=r"(product)
-                             : "r"(product)
-                            );
-        asm volatile("cmp %0, %1"
-             : "=r"(product)
-                             : "r"(temp)
-                            );
-        asm volatile("movge %0, %1"
-             : "=r"(product)
-                             : "r"(temp)
-                            );
-
-        return ((Word16) product);
-    }
-
-#else /* C EQUIVALENT */
-
-    static inline Word16 mult(Word16 var1, Word16 var2, Flag *pOverflow)
-    {
-        register Word32 product;
-
-        product = ((Word32) var1 * var2) >> 15;
-
-        /* Saturate result (if necessary). */
-        /* var1 * var2 >0x00007fff is the only case */
-        /* that saturation occurs. */
-
-        if (product > 0x00007fffL)
-        {
-            *pOverflow = 1;
-            product = (Word32) MAX_16;
-        }
-
-
-        /* Return the product as a 16 bit value by type casting Word32 to Word16 */
-
-        return ((Word16) product);
-    }
-
-#endif
-    /*----------------------------------------------------------------------------
-    ; END
-    ----------------------------------------------------------------------------*/
-#ifdef __cplusplus
-}
-#endif
-
-#endif  /* _MULT_H_ */
-
diff --git a/media/module/codecs/amrnb/common/include/n_proc.h b/media/module/codecs/amrnb/common/include/n_proc.h
deleted file mode 100644
index e5738c1..0000000
--- a/media/module/codecs/amrnb/common/include/n_proc.h
+++ /dev/null
@@ -1,31 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/****************************************************************************************
-Portions of this file are derived from the following 3GPP standard:
-
-    3GPP TS 26.073
-    ANSI-C code for the Adaptive Multi-Rate (AMR) speech codec
-    Available from http://www.3gpp.org
-
-(C) 2004, 3GPP Organizational Partners (ARIB, ATIS, CCSA, ETSI, TTA, TTC)
-Permission to distribute, modify and use this file under the standard license
-terms listed above has been obtained from the copyright holder.
-****************************************************************************************/
-/* $Id $ */
-
-void proc_head(char *mes);
diff --git a/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp b/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
index af62074..984baf8 100644
--- a/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
+++ b/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest.cpp
@@ -22,6 +22,7 @@
 
 #include <audio_utils/sndfile.h>
 #include <stdio.h>
+#include <fstream>
 
 #include "gsmamr_dec.h"
 
@@ -40,7 +41,7 @@
 
 static AmrnbDecTestEnvironment *gEnv = nullptr;
 
-class AmrnbDecoderTest : public ::testing::TestWithParam<string> {
+class AmrnbDecoderTest : public ::testing::TestWithParam<std::tuple<string, string>> {
   public:
     AmrnbDecoderTest() : mFpInput(nullptr) {}
 
@@ -54,6 +55,7 @@
     FILE *mFpInput;
     SNDFILE *openOutputFile(SF_INFO *sfInfo);
     int32_t DecodeFrames(void *amrHandle, SNDFILE *outFileHandle, int32_t frameCount = INT32_MAX);
+    bool compareBinaryFiles(const std::string& refFilePath, const std::string& outFilePath);
 };
 
 SNDFILE *AmrnbDecoderTest::openOutputFile(SF_INFO *sfInfo) {
@@ -97,6 +99,42 @@
     return 0;
 }
 
+bool AmrnbDecoderTest::compareBinaryFiles(const std::string &refFilePath,
+                                          const std::string &outFilePath) {
+    std::ifstream refFile(refFilePath, std::ios::binary | std::ios::ate);
+    std::ifstream outFile(outFilePath, std::ios::binary | std::ios::ate);
+    assert(refFile.is_open() && "Error opening reference file");
+    assert(outFile.is_open() && "Error opening output file");
+
+    std::streamsize refFileSize = refFile.tellg();
+    std::streamsize outFileSize = outFile.tellg();
+    if (refFileSize != outFileSize) {
+        ALOGE("Error, File size mismatch: Reference file size = %td bytes,"
+              " but output file size = %td bytes.", refFileSize, outFileSize);
+        return false;
+    }
+
+    refFile.seekg(0, std::ios::beg);
+    outFile.seekg(0, std::ios::beg);
+    constexpr std::streamsize kBufferSize = 16 * 1024;
+    char refBuffer[kBufferSize];
+    char outBuffer[kBufferSize];
+
+    while (refFile && outFile) {
+        refFile.read(refBuffer, kBufferSize);
+        outFile.read(outBuffer, kBufferSize);
+
+        std::streamsize refBytesRead = refFile.gcount();
+        std::streamsize outBytesRead = outFile.gcount();
+
+        if (refBytesRead != outBytesRead || memcmp(refBuffer, outBuffer, refBytesRead) != 0) {
+            ALOGE("Error, File content mismatch.");
+            return false;
+        }
+    }
+    return true;
+}
+
 TEST_F(AmrnbDecoderTest, CreateAmrnbDecoderTest) {
     void *amrHandle;
     int32_t status = GSMInitDecode(&amrHandle, (Word8 *)"AMRNBDecoder");
@@ -106,7 +144,7 @@
 }
 
 TEST_P(AmrnbDecoderTest, DecodeTest) {
-    string inputFile = gEnv->getRes() + GetParam();
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     mFpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -126,10 +164,15 @@
     sf_close(outFileHandle);
     GSMDecodeFrameExit(&amrHandle);
     ASSERT_EQ(amrHandle, nullptr) << "Error deleting AMR-NB decoder";
+
+    string refFilePath = gEnv->getRes() + std::get<1>(GetParam());
+    ASSERT_TRUE(compareBinaryFiles(refFilePath, OUTPUT_FILE))
+       << "Error, Binary file comparison failed: Output file " << OUTPUT_FILE
+       << " does not match the reference file " << refFilePath << ".";
 }
 
 TEST_P(AmrnbDecoderTest, ResetDecodeTest) {
-    string inputFile = gEnv->getRes() + GetParam();
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     mFpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -159,8 +202,24 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(AmrnbDecoderTestAll, AmrnbDecoderTest,
-                         ::testing::Values(("bbb_8000hz_1ch_8kbps_amrnb_30sec.amrnb"),
-                                           ("sine_amrnb_1ch_12kbps_8000hz.amrnb")));
+                         ::testing::Values(std::make_tuple(
+                                                   "bbb_8000hz_1ch_8kbps_amrnb_30sec.amrnb",
+                                                   "bbb_8000hz_1ch_8kbps_amrnb_30sec_ref.pcm"),
+                                           std::make_tuple(
+                                                   "sine_amrnb_1ch_12kbps_8000hz.amrnb",
+                                                   "sine_amrnb_1ch_12kbps_8000hz_ref.pcm"),
+                                           std::make_tuple(
+                                                   "trim_8000hz_1ch_12kpbs_amrnb_200ms.amrnb",
+                                                   "trim_8000hz_1ch_12kpbs_amrnb_200ms_ref.pcm"),
+                                           std::make_tuple(
+                                                   "bbb_8kHz_1ch_4.75kbps_amrnb_3sec.amrnb",
+                                                   "bbb_8kHz_1ch_4.75kbps_amrnb_3sec_ref.pcm"),
+                                           std::make_tuple(
+                                                   "bbb_8kHz_1ch_10kbps_amrnb_1sec.amrnb",
+                                                   "bbb_8kHz_1ch_10kbps_amrnb_1sec_ref.pcm"),
+                                           std::make_tuple(
+                                                   "bbb_8kHz_1ch_12.2kbps_amrnb_3sec.amrnb",
+                                                   "bbb_8kHz_1ch_12.2kbps_amrnb_3sec_ref.pcm")));
 
 int main(int argc, char **argv) {
     gEnv = new AmrnbDecTestEnvironment();
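For illustration, here is a minimal standalone sketch of the chunk-and-memcmp pattern that the new compareBinaryFiles helpers follow. It uses plain iostream instead of the test fixture and ALOGE logging, and the filesMatch/main names are illustrative only, not part of the patch:

```
// Standalone sketch of chunked binary comparison (illustration only).
#include <cstring>
#include <fstream>
#include <iostream>
#include <string>

static bool filesMatch(const std::string& refPath, const std::string& outPath) {
    std::ifstream ref(refPath, std::ios::binary | std::ios::ate);
    std::ifstream out(outPath, std::ios::binary | std::ios::ate);
    if (!ref.is_open() || !out.is_open()) return false;  // treat open failure as mismatch
    if (ref.tellg() != out.tellg()) return false;         // quick reject on size
    ref.seekg(0, std::ios::beg);
    out.seekg(0, std::ios::beg);
    constexpr std::streamsize kChunk = 16 * 1024;
    char refBuf[kChunk];
    char outBuf[kChunk];
    while (ref && out) {
        ref.read(refBuf, kChunk);
        out.read(outBuf, kChunk);
        // gcount() covers the short final chunk; unequal counts mean the streams diverged.
        if (ref.gcount() != out.gcount() ||
            std::memcmp(refBuf, outBuf, static_cast<std::size_t>(ref.gcount())) != 0) {
            return false;
        }
    }
    return true;
}

int main(int argc, char** argv) {
    if (argc != 3) {
        std::cerr << "usage: filecmp <reference> <output>\n";
        return 2;
    }
    return filesMatch(argv[1], argv[2]) ? 0 : 1;
}
```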
diff --git a/media/module/codecs/amrnb/dec/test/AndroidTest.xml b/media/module/codecs/amrnb/dec/test/AndroidTest.xml
index 539fa5c..7b2ba15 100644
--- a/media/module/codecs/amrnb/dec/test/AndroidTest.xml
+++ b/media/module/codecs/amrnb/dec/test/AndroidTest.xml
@@ -23,17 +23,17 @@
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
         <option name="target" value="host" />
         <option name="config-filename" value="AmrnbDecoderTest" />
-        <option name="version" value="1.0"/>
+        <option name="version" value="2.0"/>
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
         <option name="push-all" value="true" />
-        <option name="media-folder-name" value="AmrnbDecoderTest-1.0" />
+        <option name="media-folder-name" value="AmrnbDecoderTest-2.0" />
         <option name="dynamic-config-module" value="AmrnbDecoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrnbDecoderTest" />
-        <option name="native-test-flag" value="-P /sdcard/test/AmrnbDecoderTest-1.0/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrnbDecoderTest-2.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrnb/dec/test/DynamicConfig.xml b/media/module/codecs/amrnb/dec/test/DynamicConfig.xml
index 701a752..02b869a 100644
--- a/media/module/codecs/amrnb/dec/test/DynamicConfig.xml
+++ b/media/module/codecs/amrnb/dec/test/DynamicConfig.xml
@@ -15,6 +15,6 @@
 
 <dynamicConfig>
     <entry key="media_files_url">
-            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest-1.0.zip</value>
+            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest-2.0.zip</value>
     </entry>
 </dynamicConfig>
diff --git a/media/module/codecs/amrnb/dec/test/README.md b/media/module/codecs/amrnb/dec/test/README.md
index 41fb80a..ea54975 100644
--- a/media/module/codecs/amrnb/dec/test/README.md
+++ b/media/module/codecs/amrnb/dec/test/README.md
@@ -22,15 +22,15 @@
 adb push ${OUT}/data/nativetest/AmrnbDecoderTest/AmrnbDecoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest-1.0.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/dec/test/AmrnbDecoderTest-2.0.zip). Download, unzip and push these files into device for testing.
 
 ```
-adb push AmrnbDecoderTest-1.0 /data/local/tmp/
+adb push AmrnbDecoderTest-2.0 /data/local/tmp/
 ```
 
 usage: AmrnbDecoderTest -P \<path_to_folder\>
 ```
-adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/AmrnbDecoderTest-1.0/
+adb shell /data/local/tmp/AmrnbDecoderTest -P /data/local/tmp/AmrnbDecoderTest-2.0/
 ```
 Alternatively, the test can also be run using atest command.
 
diff --git a/media/module/codecs/amrnb/enc/src/g_pitch.cpp b/media/module/codecs/amrnb/enc/src/g_pitch.cpp
index 5b80e2a..6f686fa 100644
--- a/media/module/codecs/amrnb/enc/src/g_pitch.cpp
+++ b/media/module/codecs/amrnb/enc/src/g_pitch.cpp
@@ -376,15 +376,11 @@
     {
         L_temp = ((Word32) * (p_xn++) * *(p_y1++));
         s1 = s;
-        s = s1 + L_temp;
 
-        if ((s1 ^ L_temp) > 0)
+        if (__builtin_add_overflow(s1, L_temp, &s))
         {
-            if ((s1 ^ s) < 0)
-            {
-                *pOverflow = 1;
-                break;
-            }
+            *pOverflow = 1;
+            break;
         }
     }
 
diff --git a/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp b/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
index fb72998..e3bd0e0 100644
--- a/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
+++ b/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest.cpp
@@ -21,6 +21,7 @@
 
 #include <audio_utils/sndfile.h>
 #include <stdio.h>
+#include <fstream>
 
 #include "gsmamr_enc.h"
 
@@ -39,7 +40,7 @@
 
 static AmrnbEncTestEnvironment *gEnv = nullptr;
 
-class AmrnbEncoderTest : public ::testing::TestWithParam<pair<string, int32_t>> {
+class AmrnbEncoderTest : public ::testing::TestWithParam<tuple<string, int32_t, string>> {
   public:
     AmrnbEncoderTest() : mAmrEncHandle(nullptr) {}
 
@@ -53,6 +54,7 @@
     AmrNbEncState *mAmrEncHandle;
     int32_t EncodeFrames(int32_t mode, FILE *fpInput, FILE *mFpOutput,
                          int32_t frameCount = INT32_MAX);
+    bool compareBinaryFiles(const string& refFilePath, const string& outFilePath);
 };
 
 int32_t AmrnbEncoderTest::EncodeFrames(int32_t mode, FILE *fpInput, FILE *mFpOutput,
@@ -87,6 +89,42 @@
     return 0;
 }
 
+bool AmrnbEncoderTest::compareBinaryFiles(const std::string &refFilePath,
+                                          const std::string &outFilePath) {
+    std::ifstream refFile(refFilePath, std::ios::binary | std::ios::ate);
+    std::ifstream outFile(outFilePath, std::ios::binary | std::ios::ate);
+    assert(refFile.is_open() && "Error opening reference file");
+    assert(outFile.is_open() && "Error opening output file");
+
+    std::streamsize refFileSize = refFile.tellg();
+    std::streamsize outFileSize = outFile.tellg();
+    if (refFileSize != outFileSize) {
+        ALOGE("Error, File size mismatch: Reference file size = %td bytes,"
+              " but output file size = %td bytes.", refFileSize, outFileSize);
+        return false;
+    }
+
+    refFile.seekg(0, std::ios::beg);
+    outFile.seekg(0, std::ios::beg);
+    constexpr std::streamsize kBufferSize = 16 * 1024;
+    char refBuffer[kBufferSize];
+    char outBuffer[kBufferSize];
+
+    while (refFile && outFile) {
+        refFile.read(refBuffer, kBufferSize);
+        outFile.read(outBuffer, kBufferSize);
+
+        std::streamsize refBytesRead = refFile.gcount();
+        std::streamsize outBytesRead = outFile.gcount();
+
+        if (refBytesRead != outBytesRead || memcmp(refBuffer, outBuffer, refBytesRead) != 0) {
+            ALOGE("Error, File content mismatch.");
+            return false;
+        }
+    }
+    return true;
+}
+
 TEST_F(AmrnbEncoderTest, CreateAmrnbEncoderTest) {
     mAmrEncHandle = (AmrNbEncState *)malloc(sizeof(AmrNbEncState));
     ASSERT_NE(mAmrEncHandle, nullptr) << "Error in allocating memory to Codec handle";
@@ -111,7 +149,7 @@
     int32_t status = AMREncodeInit(&mAmrEncHandle->encCtx, &mAmrEncHandle->pidSyncCtx, 0);
     ASSERT_EQ(status, 0) << "Error creating AMR-NB encoder";
 
-    string inputFile = gEnv->getRes() + GetParam().first;
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     FILE *fpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(fpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -121,7 +159,7 @@
     // Write file header.
     fwrite("#!AMR\n", 1, 6, fpOutput);
 
-    int32_t mode = GetParam().second;
+    int32_t mode = std::get<1>(GetParam());
     int32_t encodeErr = EncodeFrames(mode, fpInput, fpOutput);
     ASSERT_EQ(encodeErr, 0) << "EncodeFrames returned error for Codec mode: " << mode;
 
@@ -134,6 +172,11 @@
     free(mAmrEncHandle);
     mAmrEncHandle = nullptr;
     ALOGV("Successfully deleted encoder");
+
+    string refFilePath = gEnv->getRes() + std::get<2>(GetParam());
+    ASSERT_TRUE(compareBinaryFiles(refFilePath, OUTPUT_FILE))
+       << "Error, Binary file comparison failed: Output file " << OUTPUT_FILE
+       << " does not match the reference file " << refFilePath << ".";
 }
 
 TEST_P(AmrnbEncoderTest, ResetEncoderTest) {
@@ -142,7 +185,7 @@
     int32_t status = AMREncodeInit(&mAmrEncHandle->encCtx, &mAmrEncHandle->pidSyncCtx, 0);
     ASSERT_EQ(status, 0) << "Error creating AMR-NB encoder";
 
-    string inputFile = gEnv->getRes() + GetParam().first;
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     FILE *fpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(fpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -152,7 +195,7 @@
     // Write file header.
     fwrite("#!AMR\n", 1, 6, fpOutput);
 
-    int32_t mode = GetParam().second;
+    int32_t mode = std::get<1>(GetParam());
     // Encode kNumFrameReset first
     int32_t encodeErr = EncodeFrames(mode, fpInput, fpOutput, kNumFrameReset);
     ASSERT_EQ(encodeErr, 0) << "EncodeFrames returned error for Codec mode: " << mode;
@@ -177,22 +220,23 @@
 
 // TODO: Add more test vectors
 INSTANTIATE_TEST_SUITE_P(AmrnbEncoderTestAll, AmrnbEncoderTest,
-                         ::testing::Values(make_pair("bbb_raw_1ch_8khz_s16le.raw", MR475),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR515),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR59),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR67),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR74),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR795),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR102),
-                                           make_pair("bbb_raw_1ch_8khz_s16le.raw", MR122),
-                                           make_pair("sinesweepraw.raw", MR475),
-                                           make_pair("sinesweepraw.raw", MR515),
-                                           make_pair("sinesweepraw.raw", MR59),
-                                           make_pair("sinesweepraw.raw", MR67),
-                                           make_pair("sinesweepraw.raw", MR74),
-                                           make_pair("sinesweepraw.raw", MR795),
-                                           make_pair("sinesweepraw.raw", MR102),
-                                           make_pair("sinesweepraw.raw", MR122)));
+    ::testing::Values(
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR475, "bbb_raw_1ch_8khz_s16le_MR475_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR515, "bbb_raw_1ch_8khz_s16le_MR515_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR59, "bbb_raw_1ch_8khz_s16le_MR59_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR67, "bbb_raw_1ch_8khz_s16le_MR67_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR74, "bbb_raw_1ch_8khz_s16le_MR74_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR795, "bbb_raw_1ch_8khz_s16le_MR795_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR102, "bbb_raw_1ch_8khz_s16le_MR102_ref.amrnb"),
+        make_tuple("bbb_raw_1ch_8khz_s16le.raw", MR122, "bbb_raw_1ch_8khz_s16le_MR122_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR475, "sinesweepraw_MR475_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR515, "sinesweepraw_MR515_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR59, "sinesweepraw_MR59_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR67, "sinesweepraw_MR67_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR74, "sinesweepraw_MR74_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR795, "sinesweepraw_MR795_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR102, "sinesweepraw_MR102_ref.amrnb"),
+        make_tuple("sinesweepraw.raw", MR122, "sinesweepraw_MR122_ref.amrnb")));
 
 int main(int argc, char **argv) {
     gEnv = new AmrnbEncTestEnvironment();
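A rough sketch of the tuple-parameterized gtest pattern the updated encoder and decoder tests use, where each tuple bundles an input clip, a codec mode, and a reference file name. MyCodecTest and the literal values below are placeholders (linked against gtest_main), not the patch's data:

```
#include <gtest/gtest.h>
#include <string>
#include <tuple>

class MyCodecTest
    : public ::testing::TestWithParam<std::tuple<std::string, int, std::string>> {};

TEST_P(MyCodecTest, HasAllThreeFields) {
    const std::string input = std::get<0>(GetParam());
    const int mode = std::get<1>(GetParam());
    const std::string reference = std::get<2>(GetParam());
    EXPECT_FALSE(input.empty());
    EXPECT_GE(mode, 0);
    EXPECT_FALSE(reference.empty());
}

INSTANTIATE_TEST_SUITE_P(MyCodecTestAll, MyCodecTest,
                         ::testing::Values(std::make_tuple("clip_a.raw", 0, "clip_a_ref.bin"),
                                           std::make_tuple("clip_b.raw", 7, "clip_b_ref.bin")));
```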
diff --git a/media/module/codecs/amrnb/enc/test/AndroidTest.xml b/media/module/codecs/amrnb/enc/test/AndroidTest.xml
index 1509728..a325ee8 100644
--- a/media/module/codecs/amrnb/enc/test/AndroidTest.xml
+++ b/media/module/codecs/amrnb/enc/test/AndroidTest.xml
@@ -23,17 +23,17 @@
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
         <option name="target" value="host" />
         <option name="config-filename" value="AmrnbEncoderTest" />
-        <option name="version" value="1.0"/>
+        <option name="version" value="2.0"/>
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
         <option name="push-all" value="true" />
-        <option name="media-folder-name" value="AmrnbEncoderTest-1.0" />
+        <option name="media-folder-name" value="AmrnbEncoderTest-2.0" />
         <option name="dynamic-config-module" value="AmrnbEncoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrnbEncoderTest" />
-        <option name="native-test-flag" value="-P /sdcard/test/AmrnbEncoderTest-1.0/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrnbEncoderTest-2.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrnb/enc/test/DynamicConfig.xml b/media/module/codecs/amrnb/enc/test/DynamicConfig.xml
index 713667a..fdc0daa 100644
--- a/media/module/codecs/amrnb/enc/test/DynamicConfig.xml
+++ b/media/module/codecs/amrnb/enc/test/DynamicConfig.xml
@@ -15,6 +15,6 @@
 
 <dynamicConfig>
     <entry key="media_files_url">
-            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest-1.0.zip</value>
+            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest-2.0.zip</value>
     </entry>
 </dynamicConfig>
diff --git a/media/module/codecs/amrnb/enc/test/README.md b/media/module/codecs/amrnb/enc/test/README.md
index f896bd1..c7b9964 100644
--- a/media/module/codecs/amrnb/enc/test/README.md
+++ b/media/module/codecs/amrnb/enc/test/README.md
@@ -22,15 +22,15 @@
 adb push ${OUT}/data/nativetest/AmrnbEncoderTest/AmrnbEncoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest-1.0.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrnb/enc/test/AmrnbEncoderTest-2.0.zip). Download, unzip and push these files into device for testing.
 
 ```
-adb push AmrnbEncoderTest-1.0 /data/local/tmp/
+adb push AmrnbEncoderTest-2.0 /data/local/tmp/
 ```
 
 usage: AmrnbEncoderTest -P \<path_to_folder\>
 ```
-adb shell /data/local/tmp/AmrnbEncoderTest -P /data/local/tmp/AmrnbEncoderTest-1.0/
+adb shell /data/local/tmp/AmrnbEncoderTest -P /data/local/tmp/AmrnbEncoderTest-2.0/
 ```
 Alternatively, the test can also be run using atest command.
 
diff --git a/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp b/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
index 2cc88ce..c0e032f 100644
--- a/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
+++ b/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest.cpp
@@ -23,6 +23,7 @@
 #include <audio_utils/sndfile.h>
 #include <memory>
 #include <stdio.h>
+#include <fstream>
 
 #include "pvamrwbdecoder.h"
 #include "pvamrwbdecoder_api.h"
@@ -44,7 +45,7 @@
 
 static AmrwbDecTestEnvironment *gEnv = nullptr;
 
-class AmrwbDecoderTest : public ::testing::TestWithParam<string> {
+class AmrwbDecoderTest : public ::testing::TestWithParam<std::tuple<string, string>> {
   public:
     AmrwbDecoderTest() : mFpInput(nullptr) {}
 
@@ -59,6 +60,7 @@
     int32_t DecodeFrames(int16_t *decoderCookie, void *decoderBuf, SNDFILE *outFileHandle,
                          int32_t frameCount = INT32_MAX);
     SNDFILE *openOutputFile(SF_INFO *sfInfo);
+    bool compareBinaryFiles(const std::string& refFilePath, const std::string& outFilePath);
 };
 
 SNDFILE *AmrwbDecoderTest::openOutputFile(SF_INFO *sfInfo) {
@@ -120,6 +122,42 @@
     return 0;
 }
 
+bool AmrwbDecoderTest::compareBinaryFiles(const std::string &refFilePath,
+                                          const std::string &outFilePath) {
+    std::ifstream refFile(refFilePath, std::ios::binary | std::ios::ate);
+    std::ifstream outFile(outFilePath, std::ios::binary | std::ios::ate);
+    assert(refFile.is_open() && "Error opening reference file");
+    assert(outFile.is_open() && "Error opening output file");
+
+    std::streamsize refFileSize = refFile.tellg();
+    std::streamsize outFileSize = outFile.tellg();
+    if (refFileSize != outFileSize) {
+        ALOGE("Error, File size mismatch: Reference file size = %td bytes,"
+               "but output file size = %td bytes", refFileSize, outFileSize);
+        return false;
+    }
+
+    refFile.seekg(0, std::ios::beg);
+    outFile.seekg(0, std::ios::beg);
+    constexpr std::streamsize kBufferSize = 16 * 1024;
+    char refBuffer[kBufferSize];
+    char outBuffer[kBufferSize];
+
+    while (refFile && outFile) {
+        refFile.read(refBuffer, kBufferSize);
+        outFile.read(outBuffer, kBufferSize);
+
+        std::streamsize refBytesRead = refFile.gcount();
+        std::streamsize outBytesRead = outFile.gcount();
+
+        if (refBytesRead != outBytesRead || memcmp(refBuffer, outBuffer, refBytesRead) != 0) {
+            ALOGE("Error, File content mismatch.");
+            return false;
+        }
+    }
+    return true;
+}
+
 TEST_F(AmrwbDecoderTest, MultiCreateAmrwbDecoderTest) {
     uint32_t memRequirements = pvDecoder_AmrWbMemRequirements();
     std::unique_ptr<char[]> decoderBuf(new char[memRequirements]);
@@ -147,7 +185,7 @@
     pvDecoder_AmrWb_Init(&amrHandle, decoderBuf.get(), &decoderCookie);
     ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
 
-    string inputFile = gEnv->getRes() + GetParam();
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     mFpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -160,6 +198,10 @@
     ASSERT_EQ(decoderErr, 0) << "DecodeFrames returned error";
 
     sf_close(outFileHandle);
+    string refFilePath = gEnv->getRes() + std::get<1>(GetParam());
+    ASSERT_TRUE(compareBinaryFiles(refFilePath, OUTPUT_FILE))
+    << "Error, Binary file comparison failed: Output file "
+    << OUTPUT_FILE << " does not match the reference file " << refFilePath << ".";
 }
 
 TEST_P(AmrwbDecoderTest, ResetDecoderTest) {
@@ -173,7 +215,7 @@
     pvDecoder_AmrWb_Init(&amrHandle, decoderBuf.get(), &decoderCookie);
     ASSERT_NE(amrHandle, nullptr) << "Failed to initialize decoder";
 
-    string inputFile = gEnv->getRes() + GetParam();
+    string inputFile = gEnv->getRes() + std::get<0>(GetParam());
     mFpInput = fopen(inputFile.c_str(), "rb");
     ASSERT_NE(mFpInput, nullptr) << "Error opening input file " << inputFile;
 
@@ -198,8 +240,21 @@
 }
 
 INSTANTIATE_TEST_SUITE_P(AmrwbDecoderTestAll, AmrwbDecoderTest,
-                         ::testing::Values(("bbb_amrwb_1ch_14kbps_16000hz.amrwb"),
-                                           ("bbb_16000hz_1ch_9kbps_amrwb_30sec.amrwb")));
+                         ::testing::Values(std::make_tuple(
+                                                "bbb_amrwb_1ch_14kbps_16000hz.amrwb",
+                                                "bbb_amrwb_1ch_14kbps_16000hz_ref.pcm"),
+                                           std::make_tuple(
+                                                "bbb_16000hz_1ch_9kbps_amrwb_30sec.amrwb",
+                                                "bbb_16000hz_1ch_9kbps_amrwb_30sec_ref.pcm"),
+                                           std::make_tuple(
+                                                "bbb_16kHz_1ch_16bps_1sec.amrwb",
+                                                "bbb_16kHz_1ch_16bps_1sec_ref.pcm"),
+                                           std::make_tuple(
+                                                "bbb_16kHz_1ch_6.6bps_3sec.amrwb",
+                                                "bbb_16kHz_1ch_6.6bps_3sec_ref.pcm"),
+                                           std::make_tuple(
+                                                "bbb_16kHz_1ch_23.85bps_3sec.amrwb",
+                                                "bbb_16kHz_1ch_23.85bps_3sec_ref.pcm")));
 
 int main(int argc, char **argv) {
     gEnv = new AmrwbDecTestEnvironment();
diff --git a/media/module/codecs/amrwb/dec/test/AndroidTest.xml b/media/module/codecs/amrwb/dec/test/AndroidTest.xml
index 392df03..dbd1407 100644
--- a/media/module/codecs/amrwb/dec/test/AndroidTest.xml
+++ b/media/module/codecs/amrwb/dec/test/AndroidTest.xml
@@ -23,17 +23,17 @@
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
         <option name="target" value="host" />
         <option name="config-filename" value="AmrwbDecoderTest" />
-        <option name="version" value="1.0"/>
+        <option name="version" value="2.0"/>
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
         <option name="push-all" value="true" />
-        <option name="media-folder-name" value="AmrwbDecoderTest-1.0" />
+        <option name="media-folder-name" value="AmrwbDecoderTest-2.0" />
         <option name="dynamic-config-module" value="AmrwbDecoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrwbDecoderTest" />
-        <option name="native-test-flag" value="-P /sdcard/test/AmrwbDecoderTest-1.0/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrwbDecoderTest-2.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrwb/dec/test/DynamicConfig.xml b/media/module/codecs/amrwb/dec/test/DynamicConfig.xml
index 506cc3d..52453ee 100644
--- a/media/module/codecs/amrwb/dec/test/DynamicConfig.xml
+++ b/media/module/codecs/amrwb/dec/test/DynamicConfig.xml
@@ -15,6 +15,6 @@
 
 <dynamicConfig>
     <entry key="media_files_url">
-            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest-1.0.zip</value>
+            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest-2.0.zip</value>
     </entry>
 </dynamicConfig>
diff --git a/media/module/codecs/amrwb/dec/test/README.md b/media/module/codecs/amrwb/dec/test/README.md
index 8e77456..ed76051 100644
--- a/media/module/codecs/amrwb/dec/test/README.md
+++ b/media/module/codecs/amrwb/dec/test/README.md
@@ -22,15 +22,15 @@
 adb push ${OUT}/data/nativetest/AmrwbDecoderTest/AmrwbDecoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest-1.0.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/dec/test/AmrwbDecoderTest-2.0.zip). Download, unzip and push these files into device for testing.
 
 ```
-adb push AmrwbDecoderTest-1.0 /data/local/tmp/
+adb push AmrwbDecoderTest-2.0 /data/local/tmp/
 ```
 
 usage: AmrwbDecoderTest -P \<path_to_folder\>
 ```
-adb shell /data/local/tmp/AmrwbDecoderTest -P /data/local/tmp/AmrwbDecoderTest-1.0/
+adb shell /data/local/tmp/AmrwbDecoderTest -P /data/local/tmp/AmrwbDecoderTest-2.0/
 ```
 Alternatively, the test can also be run using atest command.
 
diff --git a/media/module/codecs/amrwb/enc/inc/basic_op.h b/media/module/codecs/amrwb/enc/inc/basic_op.h
index 80ad7f1..8e740b4 100644
--- a/media/module/codecs/amrwb/enc/inc/basic_op.h
+++ b/media/module/codecs/amrwb/enc/inc/basic_op.h
@@ -569,13 +569,10 @@
 static_vo Word32 L_add (Word32 L_var1, Word32 L_var2)
 {
     Word32 L_var_out;
-    L_var_out = L_var1 + L_var2;
-    if (((L_var1 ^ L_var2) & MIN_32) == 0)
+    if (__builtin_add_overflow(L_var1, L_var2, &L_var_out))
     {
-        if ((L_var_out ^ L_var1) & MIN_32)
-        {
-            L_var_out = (L_var1 < 0) ? MIN_32 : MAX_32;
-        }
+        // saturating...
+        L_var_out = (L_var1 < 0) ? MIN_32 : MAX_32;
     }
     return (L_var_out);
 }
@@ -616,13 +613,10 @@
 static_vo Word32 L_sub (Word32 L_var1, Word32 L_var2)
 {
     Word32 L_var_out;
-    L_var_out = L_var1 - L_var2;
-    if (((L_var1 ^ L_var2) & MIN_32) != 0)
+    if (__builtin_sub_overflow(L_var1, L_var2, &L_var_out))
     {
-        if ((L_var_out ^ L_var1) & MIN_32)
-        {
-            L_var_out = (L_var1 < 0L) ? MIN_32 : MAX_32;
-        }
+        // saturating...
+        L_var_out = (L_var1 < 0L) ? MIN_32 : MAX_32;
     }
     return (L_var_out);
 }
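As a standalone sketch of the saturating arithmetic the rewritten L_add/L_sub implement, with int32_t, INT32_MIN and INT32_MAX standing in for Word32, MIN_32 and MAX_32 (the helper names are illustrative, not part of the patch):

```
#include <cassert>
#include <cstdint>

static int32_t sat_add(int32_t a, int32_t b) {
    int32_t r;
    if (__builtin_add_overflow(a, b, &r)) {
        // Overflow direction follows the sign of the first operand,
        // mirroring the original (L_var1 < 0) ? MIN_32 : MAX_32 choice.
        r = (a < 0) ? INT32_MIN : INT32_MAX;
    }
    return r;
}

static int32_t sat_sub(int32_t a, int32_t b) {
    int32_t r;
    if (__builtin_sub_overflow(a, b, &r)) {
        r = (a < 0) ? INT32_MIN : INT32_MAX;
    }
    return r;
}

int main() {
    assert(sat_add(INT32_MAX, 1) == INT32_MAX);  // positive overflow clamps high
    assert(sat_sub(INT32_MIN, 1) == INT32_MIN);  // negative overflow clamps low
    assert(sat_add(-5, 7) == 2);                 // no overflow, plain result
    return 0;
}
```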
diff --git a/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp b/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
index 1a6ee27..dc9c1b1 100644
--- a/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
+++ b/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest.cpp
@@ -20,6 +20,7 @@
 #include <utils/Log.h>
 
 #include <stdio.h>
+#include <fstream>
 
 #include "cmnMemory.h"
 #include "voAMRWB.h"
@@ -34,13 +35,15 @@
 
 static AmrwbEncTestEnvironment *gEnv = nullptr;
 
-class AmrwbEncoderTest : public ::testing::TestWithParam<tuple<string, int32_t, VOAMRWBFRAMETYPE>> {
+class AmrwbEncoderTest : public ::testing::TestWithParam<tuple<string, int32_t,
+                                                               VOAMRWBFRAMETYPE, string>> {
   public:
     AmrwbEncoderTest() : mEncoderHandle(nullptr) {
-        tuple<string, int32_t, VOAMRWBFRAMETYPE> params = GetParam();
+        tuple<string, int32_t, VOAMRWBFRAMETYPE, string> params = GetParam();
         mInputFile = gEnv->getRes() + get<0>(params);
         mMode = get<1>(params);
         mFrameType = get<2>(params);
+        refFilePath = gEnv->getRes() + get<3>(params);
         mMemOperator.Alloc = cmnMemAlloc;
         mMemOperator.Copy = cmnMemCopy;
         mMemOperator.Free = cmnMemFree;
@@ -66,8 +69,47 @@
     VO_CODEC_INIT_USERDATA mUserData;
     VO_HANDLE mEncoderHandle;
     int32_t mMode;
+    string refFilePath;
+
+    bool compareBinaryFiles(const string& refFilePath, const string& outFilePath);
 };
 
+bool AmrwbEncoderTest::compareBinaryFiles(const std::string &refFilePath,
+                                          const std::string &outFilePath) {
+    std::ifstream refFile(refFilePath, std::ios::binary | std::ios::ate);
+    std::ifstream outFile(outFilePath, std::ios::binary | std::ios::ate);
+    assert(refFile.is_open() && "Error opening reference file");
+    assert(outFile.is_open() && "Error opening output file");
+
+    std::streamsize refFileSize = refFile.tellg();
+    std::streamsize outFileSize = outFile.tellg();
+    if (refFileSize != outFileSize) {
+        ALOGE("Error, File size mismatch: Reference file size = %td bytes,"
+               "but output file size = %td bytes", refFileSize, outFileSize);
+        return false;
+    }
+
+    refFile.seekg(0, std::ios::beg);
+    outFile.seekg(0, std::ios::beg);
+    constexpr std::streamsize kBufferSize = 16 * 1024;
+    char refBuffer[kBufferSize];
+    char outBuffer[kBufferSize];
+
+    while (refFile && outFile) {
+        refFile.read(refBuffer, kBufferSize);
+        outFile.read(outBuffer, kBufferSize);
+
+        std::streamsize refBytesRead = refFile.gcount();
+        std::streamsize outBytesRead = outFile.gcount();
+
+        if (refBytesRead != outBytesRead || memcmp(refBuffer, outBuffer, refBytesRead) != 0) {
+            ALOGE("Error, File content mismatch.");
+            return false;
+        }
+    }
+    return true;
+}
+
 TEST_P(AmrwbEncoderTest, CreateAmrwbEncoderTest) {
     int32_t status = voGetAMRWBEncAPI(&mApiHandle);
     ASSERT_EQ(status, VO_ERR_NONE) << "Failed to get api handle";
@@ -152,38 +194,69 @@
     if (fpOutput) {
         fclose(fpOutput);
     }
+
+    ASSERT_TRUE(compareBinaryFiles(refFilePath, OUTPUT_FILE))
+    << "Error, Binary file comparison failed: Output file "
+    << OUTPUT_FILE << " does not match the reference file " << refFilePath << ".";
 }
 
 INSTANTIATE_TEST_SUITE_P(
-        AmrwbEncoderTestAll, AmrwbEncoderTest,
-        ::testing::Values(
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_DEFAULT),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_ITU),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_RFC3267),
-                make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_RFC3267)));
+    AmrwbEncoderTestAll, AmrwbEncoderTest,
+    ::testing::Values(
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD66_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD885_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1265_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1425_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1585_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1825_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1985_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2305_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_DEFAULT,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2385_VOAMRWB_DEFAULT_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD66_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD885_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1265_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1425_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1585_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1825_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1985_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2305_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_ITU,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2385_VOAMRWB_ITU_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD66, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD66_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD885, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD885_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1265, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1265_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1425, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1425_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1585, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1585_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1825, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1825_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD1985, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD1985_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2305, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2305_VOAMRWB_RFC3267_ref.amrwb"),
+        make_tuple("bbb_raw_1ch_16khz_s16le.raw", VOAMRWB_MD2385, VOAMRWB_RFC3267,
+                    "bbb_raw_1ch_16khz_s16le_VOAMRWB_MD2385_VOAMRWB_RFC3267_ref.amrwb")));
 
 int main(int argc, char **argv) {
     gEnv = new AmrwbEncTestEnvironment();
diff --git a/media/module/codecs/amrwb/enc/test/AndroidTest.xml b/media/module/codecs/amrwb/enc/test/AndroidTest.xml
index 8822cb2..1f4121f 100644
--- a/media/module/codecs/amrwb/enc/test/AndroidTest.xml
+++ b/media/module/codecs/amrwb/enc/test/AndroidTest.xml
@@ -23,17 +23,17 @@
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.DynamicConfigPusher">
         <option name="target" value="host" />
         <option name="config-filename" value="AmrwbEncoderTest" />
-        <option name="version" value="1.0"/>
+        <option name="version" value="2.0"/>
     </target_preparer>
     <target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
         <option name="push-all" value="true" />
-        <option name="media-folder-name" value="AmrwbEncoderTest-1.0" />
+        <option name="media-folder-name" value="AmrwbEncoderTest-2.0" />
         <option name="dynamic-config-module" value="AmrwbEncoderTest" />
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
         <option name="native-test-device-path" value="/data/local/tmp" />
         <option name="module-name" value="AmrwbEncoderTest" />
-        <option name="native-test-flag" value="-P /sdcard/test/AmrwbEncoderTest-1.0/" />
+        <option name="native-test-flag" value="-P /sdcard/test/AmrwbEncoderTest-2.0/" />
     </test>
 </configuration>
diff --git a/media/module/codecs/amrwb/enc/test/DynamicConfig.xml b/media/module/codecs/amrwb/enc/test/DynamicConfig.xml
index a0b6218..59701ea 100644
--- a/media/module/codecs/amrwb/enc/test/DynamicConfig.xml
+++ b/media/module/codecs/amrwb/enc/test/DynamicConfig.xml
@@ -15,6 +15,6 @@
 
 <dynamicConfig>
     <entry key="media_files_url">
-            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest-1.0.zip</value>
+            <value>https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest-2.0.zip</value>
     </entry>
 </dynamicConfig>
diff --git a/media/module/codecs/amrwb/enc/test/README.md b/media/module/codecs/amrwb/enc/test/README.md
index 3b9cc39..ea2c31e 100644
--- a/media/module/codecs/amrwb/enc/test/README.md
+++ b/media/module/codecs/amrwb/enc/test/README.md
@@ -22,7 +22,7 @@
 adb push ${OUT}/data/nativetest/AmrwbEncoderTest/AmrwbEncoderTest /data/local/tmp/
 ```
 
-The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest-1.0.zip). Download, unzip and push these files into device for testing.
+The resource file for the tests is taken from [here](https://dl.google.com/android-unittest/media/frameworks/av/media/module/codecs/amrwb/enc/test/AmrwbEncoderTest-2.0.zip). Download, unzip and push these files into device for testing.
 
 ```
 adb push AmrwbEncoderTest-1.0 /data/local/tmp/
@@ -30,7 +30,7 @@
 
 usage: AmrwbEncoderTest -P \<path_to_folder\>
 ```
-adb shell /data/local/tmp/AmrwbEncoderTest -P /data/local/tmp/AmrwbEncoderTest-1.0/
+adb shell /data/local/tmp/AmrwbEncoderTest -P /data/local/tmp/AmrwbEncoderTest-2.0/
 ```
 Alternatively, the test can also be run using atest command.
 
diff --git a/media/module/codecserviceregistrant/Android.bp b/media/module/codecserviceregistrant/Android.bp
index becb98a..56cd8b8 100644
--- a/media/module/codecserviceregistrant/Android.bp
+++ b/media/module/codecserviceregistrant/Android.bp
@@ -55,6 +55,8 @@
         "com.android.media.swcodec",
     ],
 
+    export_include_dirs: ["include"],
+
     srcs: [
         "CodecServiceRegistrant.cpp",
     ],
diff --git a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
index 6df9dc8..433b5e9 100644
--- a/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
+++ b/media/module/codecserviceregistrant/CodecServiceRegistrant.cpp
@@ -40,7 +40,7 @@
 #include <codec2/aidl/ComponentStore.h>
 #include <codec2/aidl/ParamTypes.h>
 
-#include <media/CodecServiceRegistrant.h>
+#include <codecserviceregistrant/CodecServiceRegistrant.h>
 
 namespace /* unnamed */ {
 
@@ -775,13 +775,12 @@
     return nullptr;
 }
 
-extern "C" void RegisterCodecServices() {
+/**
+ * This function encapsulates the core logic required to register codec services,
+ * separated from threadpool management to avoid timeouts when called by the fuzzer.
+ */
+static void RegisterCodecServicesWithExistingThreadpool() {
     const bool aidlSelected = c2_aidl::utils::IsSelected();
-    constexpr int kThreadCount = 64;
-    ABinderProcess_setThreadPoolMaxThreadCount(kThreadCount);
-    ABinderProcess_startThreadPool();
-    ::android::hardware::configureRpcThreadpool(kThreadCount, false);
-
     LOG(INFO) << "Creating software Codec2 service...";
     std::shared_ptr<C2ComponentStore> store =
         android::GetCodec2PlatformComponentStore();
@@ -791,47 +790,6 @@
     }
 
     using namespace ::android::hardware::media::c2;
-
-    if (!ionPropertiesDefined()) {
-        using IComponentStore =
-            ::android::hardware::media::c2::V1_0::IComponentStore;
-        std::string const preferredStoreName = "default";
-        if (aidlSelected) {
-            std::shared_ptr<c2_aidl::IComponentStore> preferredStore;
-            if (__builtin_available(android __ANDROID_API_S__, *)) {
-                std::string instanceName = ::android::base::StringPrintf(
-                        "%s/%s", c2_aidl::IComponentStore::descriptor, preferredStoreName.c_str());
-                if (AServiceManager_isDeclared(instanceName.c_str())) {
-                    preferredStore = c2_aidl::IComponentStore::fromBinder(::ndk::SpAIBinder(
-                            AServiceManager_waitForService(instanceName.c_str())));
-                }
-            }
-            if (preferredStore) {
-                ::android::SetPreferredCodec2ComponentStore(
-                        std::make_shared<H2C2ComponentStore>(preferredStore));
-                LOG(INFO) <<
-                        "Preferred Codec2 AIDL store is set to \"" <<
-                        preferredStoreName << "\".";
-            } else {
-                LOG(INFO) <<
-                        "Preferred Codec2 AIDL store is defaulted to \"software\".";
-            }
-        } else {
-            sp<IComponentStore> preferredStore =
-                IComponentStore::getService(preferredStoreName.c_str());
-            if (preferredStore) {
-                ::android::SetPreferredCodec2ComponentStore(
-                        std::make_shared<H2C2ComponentStore>(preferredStore));
-                LOG(INFO) <<
-                        "Preferred Codec2 HIDL store is set to \"" <<
-                        preferredStoreName << "\".";
-            } else {
-                LOG(INFO) <<
-                        "Preferred Codec2 HIDL store is defaulted to \"software\".";
-            }
-        }
-    }
-
     bool registered = false;
     const std::string aidlServiceName =
         std::string(c2_aidl::IComponentStore::descriptor) + "/software";
@@ -877,11 +835,61 @@
                      " so it is not being registered with hwservicemanager.";
     }
 
+    // Select the preferred store only after the software store has been registered,
+    // so that the correct store ends up being marked as preferred.
+    if (!ionPropertiesDefined()) {
+        using IComponentStore =
+            ::android::hardware::media::c2::V1_0::IComponentStore;
+        std::string const preferredStoreName = "default";
+        if (aidlSelected) {
+            std::shared_ptr<c2_aidl::IComponentStore> preferredStore;
+            if (__builtin_available(android __ANDROID_API_S__, *)) {
+                std::string instanceName = ::android::base::StringPrintf(
+                        "%s/%s", c2_aidl::IComponentStore::descriptor, preferredStoreName.c_str());
+                if (AServiceManager_isDeclared(instanceName.c_str())) {
+                    preferredStore = c2_aidl::IComponentStore::fromBinder(::ndk::SpAIBinder(
+                            AServiceManager_waitForService(instanceName.c_str())));
+                }
+            }
+            if (preferredStore) {
+                ::android::SetPreferredCodec2ComponentStore(
+                        std::make_shared<H2C2ComponentStore>(preferredStore));
+                LOG(INFO) <<
+                        "Preferred Codec2 AIDL store is set to \"" <<
+                        preferredStoreName << "\".";
+            } else {
+                LOG(INFO) <<
+                        "Preferred Codec2 AIDL store is defaulted to \"software\".";
+            }
+        } else {
+            sp<IComponentStore> preferredStore =
+                IComponentStore::getService(preferredStoreName.c_str());
+            if (preferredStore) {
+                ::android::SetPreferredCodec2ComponentStore(
+                        std::make_shared<H2C2ComponentStore>(preferredStore));
+                LOG(INFO) <<
+                        "Preferred Codec2 HIDL store is set to \"" <<
+                        preferredStoreName << "\".";
+            } else {
+                LOG(INFO) <<
+                        "Preferred Codec2 HIDL store is defaulted to \"software\".";
+            }
+        }
+    }
+
     if (registered) {
         LOG(INFO) << "Software Codec2 service created and registered.";
     }
+}
+
+extern "C" void RegisterCodecServices() {
+    constexpr int kThreadCount = 64;
+    ABinderProcess_setThreadPoolMaxThreadCount(kThreadCount);
+    ABinderProcess_startThreadPool();
+    ::android::hardware::configureRpcThreadpool(kThreadCount, false);
+
+    RegisterCodecServicesWithExistingThreadpool();
 
     ABinderProcess_joinThreadPool();
     ::android::hardware::joinRpcThreadpool();
 }
-
diff --git a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
index 4868e0c..0baf1ca 100644
--- a/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
+++ b/media/module/codecserviceregistrant/fuzzer/codecServiceRegistrant_fuzzer.cpp
@@ -13,6 +13,9 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+#include <codecserviceregistrant/CodecServiceRegistrant.h>
+
 #include "../CodecServiceRegistrant.cpp"
 #include "fuzzer/FuzzedDataProvider.h"
 #include <C2Config.h>
@@ -166,9 +169,9 @@
 void CodecServiceRegistrantFuzzer::process(const uint8_t *data, size_t size) {
   mFDP = new FuzzedDataProvider(data, size);
   invokeH2C2ComponentStore();
-  /** RegisterCodecServices is called here to improve code coverage */
-  /** as currently it is not called by codecServiceRegistrant       */
-  RegisterCodecServices();
+  /** RegisterCodecServicesWithExistingThreadpool() is called directly here to improve
+   * code coverage without the threadpool setup and join done by RegisterCodecServices() */
+  RegisterCodecServicesWithExistingThreadpool();
   delete mFDP;
 }
 
diff --git a/media/libmedia/include/media/CodecServiceRegistrant.h b/media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
similarity index 77%
rename from media/libmedia/include/media/CodecServiceRegistrant.h
rename to media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
index e0af781..1c6f71f 100644
--- a/media/libmedia/include/media/CodecServiceRegistrant.h
+++ b/media/module/codecserviceregistrant/include/codecserviceregistrant/CodecServiceRegistrant.h
@@ -18,6 +18,13 @@
 
 #define CODEC_SERVICE_REGISTRANT_H_
 
+/**
+ * This function starts the threadpool, calls the registration logic
+ * encapsulated in RegisterCodecServicesWithExistingThreadpool(), and
+ * then joins the threadpool.
+ */
+extern "C" void RegisterCodecServices();
+
 typedef void (*RegisterCodecServicesFunc)();
 
 #endif  // CODEC_SERVICE_REGISTRANT_H_
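The relocated header keeps the `RegisterCodecServicesFunc` typedef alongside the new declaration, which suggests the registrant is meant to be loaded at runtime and its entry point resolved by name rather than linked directly. A minimal loader sketch under that assumption (the library name below is hypothetical, not taken from this change):

```cpp
// Hypothetical loader: resolve RegisterCodecServices() through the
// RegisterCodecServicesFunc typedef from the relocated header.
#include <dlfcn.h>

#include <codecserviceregistrant/CodecServiceRegistrant.h>

bool loadAndRegisterCodecServices() {
    // Library name is an assumption for illustration only.
    void* handle = dlopen("libmedia_codecserviceregistrant.so", RTLD_NOW | RTLD_LOCAL);
    if (handle == nullptr) return false;                     // library not present

    auto registerFunc = reinterpret_cast<RegisterCodecServicesFunc>(
            dlsym(handle, "RegisterCodecServices"));
    if (registerFunc == nullptr) {
        dlclose(handle);
        return false;                                        // symbol not exported
    }
    registerFunc();  // starts the threadpool, registers the store, then joins it
    return true;     // normally not reached: the call blocks in the threadpool join
}
```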
diff --git a/media/module/extractors/mp4/MPEG4Extractor.cpp b/media/module/extractors/mp4/MPEG4Extractor.cpp
index b3707c8..f247f8c 100644
--- a/media/module/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/module/extractors/mp4/MPEG4Extractor.cpp
@@ -523,11 +523,10 @@
     }
 
     [this, &track] {
-        int64_t duration;
+        int64_t duration = track->mMdhdDurationUs;
         int32_t samplerate;
         // Only for audio track.
-        if (track->elst_needs_processing && mHeaderTimescale != 0 &&
-            AMediaFormat_getInt64(track->meta, AMEDIAFORMAT_KEY_DURATION, &duration) &&
+        if (track->elst_needs_processing && mHeaderTimescale != 0 && duration != 0 &&
             AMediaFormat_getInt32(track->meta, AMEDIAFORMAT_KEY_SAMPLE_RATE, &samplerate)) {
             // Elst has to be processed only the first time this function is called.
             track->elst_needs_processing = false;
@@ -1645,7 +1644,10 @@
                           (long long) duration, (long long) mLastTrack->timescale);
                     return ERROR_MALFORMED;
                 }
-                AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+                // Store this track's mdhd duration to calculate the padding.
+                mLastTrack->mMdhdDurationUs = (int64_t)durationUs;
+            } else {
+                mLastTrack->mMdhdDurationUs = 0;
             }
 
             uint8_t lang[2];
@@ -3907,17 +3909,18 @@
     }
 
     int32_t id;
+    int64_t duration;
 
     if (version == 1) {
         // we can get ctime value from U64_AT(&buffer[4])
         // we can get mtime value from U64_AT(&buffer[12])
         id = U32_AT(&buffer[20]);
-        // we can get duration value from U64_AT(&buffer[28])
+        duration = U64_AT(&buffer[28]);
     } else if (version == 0) {
         // we can get ctime value from U32_AT(&buffer[4])
         // we can get mtime value from U32_AT(&buffer[8])
         id = U32_AT(&buffer[12]);
-        // we can get duration value from U32_AT(&buffer[20])
+        duration = U32_AT(&buffer[20]);
     } else {
         return ERROR_UNSUPPORTED;
     }
@@ -3926,6 +3929,15 @@
         return ERROR_MALFORMED;
 
     AMediaFormat_setInt32(mLastTrack->meta, AMEDIAFORMAT_KEY_TRACK_ID, id);
+    if (duration != 0 && mHeaderTimescale != 0) {
+        long double durationUs = ((long double)duration * 1000000) / mHeaderTimescale;
+        if (durationUs < 0 || durationUs > INT64_MAX) {
+            ALOGE("cannot represent %lld * 1000000 / %lld in 64 bits",
+                  (long long) duration, (long long) mHeaderTimescale);
+            return ERROR_MALFORMED;
+        }
+        AMediaFormat_setInt64(mLastTrack->meta, AMEDIAFORMAT_KEY_DURATION, durationUs);
+    }
 
     size_t matrixOffset = dynSize + 16;
     int32_t a00 = U32_AT(&buffer[matrixOffset]);
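The tkhd change above mirrors what the mdhd path already did: the previously ignored duration field is now read and converted to microseconds, with a range check so a hostile tick count cannot overflow the signed 64-bit duration. A self-contained sketch of that conversion (helper name and return type are illustrative):

```cpp
// Convert a tkhd/mvhd tick count at the movie-header timescale to microseconds.
// Extended precision plus an explicit range check guards against int64_t overflow.
#include <cstdint>
#include <optional>

std::optional<int64_t> ticksToMicroseconds(uint64_t ticks, uint32_t timescale) {
    if (ticks == 0 || timescale == 0) return std::nullopt;    // nothing to convert
    const long double us =
            (static_cast<long double>(ticks) * 1000000) / timescale;
    if (us < 0 || us > static_cast<long double>(INT64_MAX)) {
        return std::nullopt;                                  // would not fit in int64_t
    }
    return static_cast<int64_t>(us);
}

// Example: 90000 ticks at a 90 kHz timescale yield exactly 1'000'000 us (one second).
```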
diff --git a/media/module/extractors/mp4/include/MPEG4Extractor.h b/media/module/extractors/mp4/include/MPEG4Extractor.h
index 542a3e6..59626f6 100644
--- a/media/module/extractors/mp4/include/MPEG4Extractor.h
+++ b/media/module/extractors/mp4/include/MPEG4Extractor.h
@@ -96,7 +96,7 @@
 
         uint8_t *mTx3gBuffer;
         size_t mTx3gSize, mTx3gFilled;
-
+        int64_t mMdhdDurationUs = 0;  // default when no mdhd box is parsed
 
         Track() {
             next = NULL;
diff --git a/media/module/foundation/Android.bp b/media/module/foundation/Android.bp
index dc8384d..edf4cb5 100644
--- a/media/module/foundation/Android.bp
+++ b/media/module/foundation/Android.bp
@@ -33,9 +33,6 @@
 cc_defaults {
     name: "libstagefright_foundation_defaults",
     vendor_available: true,
-    vndk: {
-        enabled: true,
-    },
     host_supported: true,
     double_loadable: true,
 
diff --git a/media/module/foundation/tests/AData_test.cpp b/media/module/foundation/tests/AData_test.cpp
index 2628a47..87b69a6 100644
--- a/media/module/foundation/tests/AData_test.cpp
+++ b/media/module/foundation/tests/AData_test.cpp
@@ -392,7 +392,7 @@
     EXPECT_EQ(2L, _shared.use_count()); // still both u and _shared contains the object
 
     EXPECT_TRUE(u.clear());
-    EXPECT_TRUE(_shared.unique()); // now only _shared contains the object
+    EXPECT_EQ(1L, _shared.use_count()); // now only _shared contains the object
 
     EXPECT_TRUE(u.set(_constShared));
     EXPECT_EQ(2L, _constShared.use_count()); // even though it is const, we can add a use count
@@ -591,7 +591,7 @@
     EXPECT_EQ(2L, _shared.use_count()); // still both u and _shared contains the object
 
     EXPECT_TRUE(u.clear());
-    EXPECT_TRUE(_shared.unique()); // now only _shared contains the object
+    EXPECT_EQ(1L, _shared.use_count()); // now only _shared contains the object
 
     EXPECT_TRUE(u.set(_constShared));
     EXPECT_EQ(2L, _constShared.use_count()); // even though it is const, we can add a use count
diff --git a/media/module/id3/Android.bp b/media/module/id3/Android.bp
index bea3e34..e426796 100644
--- a/media/module/id3/Android.bp
+++ b/media/module/id3/Android.bp
@@ -17,6 +17,24 @@
     ],
 }
 
+cc_library_headers {
+    name: "libstagefright_id3_headers",
+    export_include_dirs: ["include"],
+    vendor_available: true,
+    apex_available: [
+        "//apex_available:platform",
+        "com.android.media",
+        "com.android.media.swcodec",
+    ],
+    min_sdk_version: "29",
+    host_supported: true,
+    target: {
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
 cc_library_static {
     name: "libstagefright_id3",
     min_sdk_version: "29",
@@ -25,7 +43,6 @@
         "com.android.media",
     ],
 
-
     srcs: ["ID3.cpp"],
 
     header_libs: [
@@ -35,6 +52,8 @@
         "media_ndk_headers",
     ],
 
+    export_include_dirs: ["include"],
+
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/include/ID3.h b/media/module/id3/include/ID3.h
similarity index 100%
rename from media/libstagefright/include/ID3.h
rename to media/module/id3/include/ID3.h
diff --git a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
index af53f64..43a4628 100644
--- a/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
+++ b/media/module/libmediatranscoding/TranscodingResourcePolicy.cpp
@@ -21,6 +21,7 @@
 #include <aidl/android/media/IResourceObserverService.h>
 #include <android/binder_manager.h>
 #include <android/binder_process.h>
+#include <map>
 #include <media/TranscodingResourcePolicy.h>
 #include <utils/Log.h>
 
@@ -66,11 +67,31 @@
     TranscodingResourcePolicy* mOwner;
 };
 
+// Cookie used for death recipients. The TranscodingResourcePolicy that a cookie
+// is associated with must outlive the cookie. Each cookie is deleted either in
+// BinderDiedCallback or in the destructor of TranscodingResourcePolicy, which
+// runs before unregisterSelf().
+class TranscodingResourcePolicyCookie {
+ public:
+    TranscodingResourcePolicyCookie(TranscodingResourcePolicy* policy) : mPolicy(policy) {}
+    TranscodingResourcePolicyCookie() = delete;
+    TranscodingResourcePolicy* mPolicy;
+};
+
+static std::map<uintptr_t, std::unique_ptr<TranscodingResourcePolicyCookie>> sCookies;
+static uintptr_t sCookieKeyCounter;
+static std::mutex sCookiesMutex;
+
 // static
 void TranscodingResourcePolicy::BinderDiedCallback(void* cookie) {
-    TranscodingResourcePolicy* owner = reinterpret_cast<TranscodingResourcePolicy*>(cookie);
-    if (owner != nullptr) {
-        owner->unregisterSelf();
+    std::lock_guard<std::mutex> guard(sCookiesMutex);
+    if (auto it = sCookies.find(reinterpret_cast<uintptr_t>(cookie)); it != sCookies.end()) {
+        ALOGI("BinderDiedCallback unregistering TranscodingResourcePolicy");
+        auto policy = reinterpret_cast<TranscodingResourcePolicy*>(it->second->mPolicy);
+        if (policy) {
+            policy->unregisterSelf();
+        }
+        sCookies.erase(it);
     }
     // TODO(chz): retry to connecting to IResourceObserverService after failure.
     // Also need to have back-up logic if IResourceObserverService is offline for
@@ -88,6 +109,23 @@
 }
 
 TranscodingResourcePolicy::~TranscodingResourcePolicy() {
+    {
+        std::lock_guard<std::mutex> guard(sCookiesMutex);
+
+        // delete all of the cookies associated with this TranscodingResourcePolicy
+        // instance since they are holding pointers to this object that will no
+        // longer be valid.
+        std::erase_if(sCookies, [this](const auto& cookieEntry) {
+            auto const& [key, cookie] = cookieEntry;
+            std::lock_guard guard(mCookieKeysLock);
+            if (const auto& it = mCookieKeys.find(key); it != mCookieKeys.end()) {
+                // No longer need to track this cookie
+                mCookieKeys.erase(key);
+                return true;
+            }
+            return false;
+        });
+    }
     unregisterSelf();
 }
 
@@ -123,7 +161,17 @@
         return;
     }
 
-    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
+    std::unique_ptr<TranscodingResourcePolicyCookie> cookie =
+            std::make_unique<TranscodingResourcePolicyCookie>(this);
+    void* cookiePtr = static_cast<void*>(cookie.get());
+    uintptr_t cookieKey = sCookieKeyCounter++;
+    sCookies.emplace(cookieKey, std::move(cookie));
+    {
+        std::lock_guard guard(mCookieKeysLock);
+        mCookieKeys.insert(cookieKey);
+    }
+
+    AIBinder_linkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(cookieKey));
 
     ALOGD("@@@ registered observer");
     mRegistered = true;
@@ -141,7 +189,6 @@
     ::ndk::SpAIBinder binder = mService->asBinder();
     if (binder.get() != nullptr) {
         Status status = mService->unregisterObserver(mObserver);
-        AIBinder_unlinkToDeath(binder.get(), mDeathRecipient.get(), reinterpret_cast<void*>(this));
     }
 
     mService = nullptr;
diff --git a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
index ee232e7..4d762b5 100644
--- a/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
+++ b/media/module/libmediatranscoding/include/media/TranscodingResourcePolicy.h
@@ -22,6 +22,7 @@
 #include <utils/Condition.h>
 
 #include <mutex>
+#include <set>
 namespace aidl {
 namespace android {
 namespace media {
@@ -48,6 +49,8 @@
     bool mRegistered GUARDED_BY(mRegisteredLock);
     std::shared_ptr<IResourceObserverService> mService GUARDED_BY(mRegisteredLock);
     std::shared_ptr<ResourceObserver> mObserver;
+    mutable std::mutex mCookieKeysLock;
+    std::set<uintptr_t> mCookieKeys;
 
     mutable std::mutex mCallbackLock;
     std::weak_ptr<ResourcePolicyCallbackInterface> mResourcePolicyCallback
@@ -59,6 +62,7 @@
     static void BinderDiedCallback(void* cookie);
 
     void registerSelf();
+    // Callers must delete the associated TranscodingResourcePolicyCookie whenever this is called.
     void unregisterSelf();
     void onResourceAvailable(pid_t pid);
 };  // class TranscodingUidPolicy
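Both halves of this change implement the same keyed-cookie pattern for binder death notifications: the raw `this` pointer is never handed to `AIBinder_linkToDeath`; instead a counter key indexes a process-wide map of heap cookies, and a death notification that races with destruction simply finds nothing to act on. A generic sketch of the pattern, with illustrative names that are not the actual policy class:

```cpp
// Keyed-cookie death recipient sketch. Recipient lifetime management and error
// handling are elided for brevity.
#include <android/binder_ibinder.h>

#include <cstdint>
#include <map>
#include <memory>
#include <mutex>

class Watcher;

struct DeathCookie {
    Watcher* owner;
};

static std::mutex gCookieMutex;
static std::map<uintptr_t, std::unique_ptr<DeathCookie>> gCookies;
static uintptr_t gNextCookieKey = 1;

class Watcher {
public:
    void watch(AIBinder* binder) {
        AIBinder_DeathRecipient* recipient = AIBinder_DeathRecipient_new(&Watcher::onDied);
        {
            std::lock_guard<std::mutex> guard(gCookieMutex);
            mKey = gNextCookieKey++;
            gCookies.emplace(mKey, std::make_unique<DeathCookie>(DeathCookie{this}));
        }
        // Only the integer key crosses the binder boundary, never `this`.
        AIBinder_linkToDeath(binder, recipient, reinterpret_cast<void*>(mKey));
    }

    ~Watcher() {
        // Drop the cookie first so a concurrent onDied() cannot see a dangling owner.
        std::lock_guard<std::mutex> guard(gCookieMutex);
        gCookies.erase(mKey);
    }

private:
    static void onDied(void* cookie) {
        std::lock_guard<std::mutex> guard(gCookieMutex);
        auto it = gCookies.find(reinterpret_cast<uintptr_t>(cookie));
        if (it == gCookies.end()) return;        // owner already destroyed
        it->second->owner->handleServiceDeath();
        gCookies.erase(it);
    }

    void handleServiceDeath() { /* reconnect, clear cached state, ... */ }

    uintptr_t mKey = 0;
};
```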
diff --git a/media/mtp/MtpFfsHandle.cpp b/media/mtp/MtpFfsHandle.cpp
index 5d68890..979edab 100644
--- a/media/mtp/MtpFfsHandle.cpp
+++ b/media/mtp/MtpFfsHandle.cpp
@@ -297,9 +297,10 @@
 }
 
 void MtpFfsHandle::close() {
-    auto timeout = std::chrono::seconds(2);
-    std::unique_lock lk(m);
-    cv.wait_for(lk, timeout ,[this]{return child_threads==0;});
+    // Join all child threads before destruction
+    for (auto& thread : mChildThreads) {
+        thread.join();
+    }
 
     io_destroy(mCtx);
     closeEndpoints();
@@ -677,12 +678,10 @@
     memcpy(temp, me.data, me.length);
     me.data = temp;
 
-    std::unique_lock lk(m);
-    child_threads++;
-    lk.unlock();
-
     std::thread t([this, me]() { return this->doSendEvent(me); });
-    t.detach();
+
+    // Store the thread object for later joining
+    mChildThreads.emplace_back(std::move(t));
     return 0;
 }
 
@@ -692,11 +691,6 @@
     if (static_cast<unsigned>(ret) != length)
         PLOG(ERROR) << "Mtp error sending event thread!";
     delete[] reinterpret_cast<char*>(me.data);
-
-    std::unique_lock lk(m);
-    child_threads--;
-    lk.unlock();
-    cv.notify_one();
 }
 
 } // namespace android
diff --git a/media/mtp/MtpFfsHandle.h b/media/mtp/MtpFfsHandle.h
index 51cdef0..8f4b769 100644
--- a/media/mtp/MtpFfsHandle.h
+++ b/media/mtp/MtpFfsHandle.h
@@ -60,9 +60,7 @@
     bool mCanceled;
     bool mBatchCancel;
 
-    std::mutex m;
-    std::condition_variable cv;
-    std::atomic<int> child_threads{0};
+    std::vector<std::thread> mChildThreads;
 
     android::base::unique_fd mControl;
     // "in" from the host's perspective => sink for mtp server
diff --git a/media/mtp/MtpServer.cpp b/media/mtp/MtpServer.cpp
index d917772..80fe51a 100644
--- a/media/mtp/MtpServer.cpp
+++ b/media/mtp/MtpServer.cpp
@@ -132,6 +132,10 @@
 }
 
 MtpServer::~MtpServer() {
+    if (mHandle) {
+        delete mHandle;
+        mHandle = NULL;
+    }
 }
 
 void MtpServer::addStorage(MtpStorage* storage) {
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
index acae06a..2e9c58b 100644
--- a/media/mtp/tests/MtpFuzzer/Android.bp
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -29,7 +29,6 @@
         "liblog",
         "libutils",
     ],
-    static_libs: ["libc++fs",],
     cflags: [
         "-Wall",
         "-Wextra",
diff --git a/media/ndk/Android.bp b/media/ndk/Android.bp
index 9ec7700..b250a03 100644
--- a/media/ndk/Android.bp
+++ b/media/ndk/Android.bp
@@ -52,9 +52,6 @@
     symbol_file: "libmediandk.map.txt",
     first_version: "21",
     unversioned_until: "current",
-    export_header_libs: [
-        "libmediandk_headers",
-    ],
 }
 
 ndk_headers {
@@ -192,7 +189,6 @@
     header_libs: [
         "libstagefright_headers",
         "libmedia_headers",
-        "libstagefright_headers",
     ],
 
     shared_libs: [
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 7b19ac0..995c674 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -22,13 +22,15 @@
 #include "NdkImagePriv.h"
 #include "NdkImageReaderPriv.h"
 
-#include <cutils/atomic.h>
-#include <utils/Log.h>
 #include <android_media_Utils.h>
-#include <ui/PublicFormat.h>
-#include <private/android/AHardwareBufferHelpers.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <grallocusage/GrallocUsageConversion.h>
 #include <gui/bufferqueue/1.0/WGraphicBufferProducer.h>
+#include <private/android/AHardwareBufferHelpers.h>
+#include <ui/PublicFormat.h>
+#include <utils/Log.h>
+
+#include <cutils/atomic.h>
 
 using namespace android;
 
@@ -291,22 +293,30 @@
 AImageReader::init() {
     mHalUsage = AHardwareBuffer_convertToGrallocUsageBits(mUsage);
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     sp<IGraphicBufferProducer> gbProducer;
     sp<IGraphicBufferConsumer> gbConsumer;
     BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     String8 consumerName = String8::format("ImageReader-%dx%df%xu%" PRIu64 "m%d-%d-%d",
             mWidth, mHeight, mFormat, mUsage, mMaxImages, getpid(),
             createProcessUniqueId());
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mBufferItemConsumer = new BufferItemConsumer(mHalUsage, mMaxImages, /*controlledByApp*/ true);
+#else
     mBufferItemConsumer =
             new BufferItemConsumer(gbConsumer, mHalUsage, mMaxImages, /*controlledByApp*/ true);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     if (mBufferItemConsumer == nullptr) {
         ALOGE("Failed to allocate BufferItemConsumer");
         return AMEDIA_ERROR_UNKNOWN;
     }
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mProducer = gbProducer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mBufferItemConsumer->setName(consumerName);
     mBufferItemConsumer->setFrameAvailableListener(mFrameListener);
     mBufferItemConsumer->setBufferFreedListener(mBufferRemovedListener);
@@ -328,10 +338,18 @@
         return AMEDIA_ERROR_UNKNOWN;
     }
     if (mUsage & AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mBufferItemConsumer->setConsumerIsProtected(true);
+#else
         gbConsumer->setConsumerIsProtected(true);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mSurface = mBufferItemConsumer->getSurface();
+#else
     mSurface = new Surface(mProducer, /*controlledByApp*/true);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     if (mSurface == nullptr) {
         ALOGE("Failed to create surface");
         return AMEDIA_ERROR_UNKNOWN;
@@ -578,8 +596,13 @@
         *handle = mWindowHandle;
         return AMEDIA_OK;
     }
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    sp<HGraphicBufferProducer> hgbp = new TWGraphicBufferProducer<HGraphicBufferProducer>(
+            mSurface->getIGraphicBufferProducer());
+#else
     sp<HGraphicBufferProducer> hgbp =
         new TWGraphicBufferProducer<HGraphicBufferProducer>(mProducer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     HalToken halToken;
     if (!createHalToken(hgbp, &halToken)) {
         return AMEDIA_ERROR_UNKNOWN;
diff --git a/media/ndk/NdkImageReaderPriv.h b/media/ndk/NdkImageReaderPriv.h
index 0199616..985f42b 100644
--- a/media/ndk/NdkImageReaderPriv.h
+++ b/media/ndk/NdkImageReaderPriv.h
@@ -25,6 +25,7 @@
 #include <utils/Mutex.h>
 #include <utils/StrongPointer.h>
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/Surface.h>
@@ -161,7 +162,9 @@
 
     uint64_t mHalUsage;
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     sp<IGraphicBufferProducer> mProducer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     sp<Surface>                mSurface;
     sp<BufferItemConsumer>     mBufferItemConsumer;
     sp<ANativeWindow>          mWindow;
diff --git a/media/ndk/include/media/NdkMediaDataSource.h b/media/ndk/include/media/NdkMediaDataSource.h
index 197e202..def142c 100644
--- a/media/ndk/include/media/NdkMediaDataSource.h
+++ b/media/ndk/include/media/NdkMediaDataSource.h
@@ -49,16 +49,16 @@
 /*
  * AMediaDataSource's callbacks will be invoked on an implementation-defined thread
  * or thread pool. No guarantees are provided about which thread(s) will be used for
- * callbacks. For example, |close| can be invoked from a different thread than the
- * thread invoking |readAt|. As such, the Implementations of AMediaDataSource callbacks
+ * callbacks. For example, `close` can be invoked from a different thread than the
+ * thread invoking `readAt`. As such, implementations of AMediaDataSource callbacks
  * must be threadsafe.
  */
 
 /**
- * Called to request data from the given |offset|.
+ * Called to request data from the given `offset`.
  *
- * Implementations should should write up to |size| bytes into
- * |buffer|, and return the number of bytes written.
+ * Implementations should write up to `size` bytes into
+ * `buffer`, and return the number of bytes written.
  *
  * Return 0 if size is zero (thus no bytes are read).
  *
@@ -78,9 +78,9 @@
  * Called to close the data source, unblock reads, and release associated
  * resources.
  *
- * The NDK media framework guarantees that after the first |close| is
+ * The NDK media framework guarantees that after the first `close` is
  * called, no future callbacks will be invoked on the data source except
- * for |close| itself.
+ * for `close` itself.
  *
  * Closing a data source allows readAt calls that were blocked waiting
  * for I/O data to return promptly.
@@ -101,7 +101,7 @@
 
 /**
  * Called to get an estimate of the number of bytes that can be read from this data source
- * starting at |offset| without blocking for I/O.
+ * starting at `offset` without blocking for I/O.
  *
  * Return -1 when such an estimate is not possible.
  */
@@ -111,10 +111,10 @@
  * Create new media data source. Returns NULL if memory allocation
  * for the new data source object fails.
  *
- * Set the |uri| from which the data source will read,
+ * Set the `uri` from which the data source will read,
  * plus additional http headers when initiating the request.
  *
- * Headers will contain corresponding items from |key_values|
+ * Headers will contain corresponding items from `key_values`
  * in the following fashion:
  *
  * key_values[0]:key_values[1]
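Since the documentation above stresses that the callbacks may be invoked concurrently and that nothing but `close` may follow the first `close`, a small illustration can help. The sketch below implements a thread-safe in-memory data source with the standard NDK setter functions; the struct and helper names are made up for the example:

```cpp
// Minimal in-memory AMediaDataSource honoring the documented callback contract.
#include <media/NdkMediaDataSource.h>

#include <algorithm>
#include <atomic>
#include <cstdint>
#include <cstring>
#include <vector>

struct MemorySource {
    std::vector<uint8_t> data;
    std::atomic<bool> closed{false};
};

static ssize_t memReadAt(void* userdata, off64_t offset, void* buffer, size_t size) {
    auto* src = static_cast<MemorySource*>(userdata);
    if (src->closed.load()) return -1;                        // reads after close fail fast
    if (size == 0) return 0;                                  // contract: zero-size read
    if (offset < 0 || static_cast<size_t>(offset) >= src->data.size()) return -1;
    const size_t n = std::min(size, src->data.size() - static_cast<size_t>(offset));
    std::memcpy(buffer, src->data.data() + offset, n);
    return static_cast<ssize_t>(n);
}

static ssize_t memGetSize(void* userdata) {
    return static_cast<ssize_t>(static_cast<MemorySource*>(userdata)->data.size());
}

static void memClose(void* userdata) {
    // Rejects any subsequent readAt; safe to call from any thread.
    static_cast<MemorySource*>(userdata)->closed.store(true);
}

AMediaDataSource* makeMemoryDataSource(MemorySource* src) {
    AMediaDataSource* ds = AMediaDataSource_new();
    AMediaDataSource_setUserdata(ds, src);
    AMediaDataSource_setReadAt(ds, memReadAt);
    AMediaDataSource_setGetSize(ds, memGetSize);
    AMediaDataSource_setClose(ds, memClose);
    return ds;
}
```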
diff --git a/media/psh_utils/Android.bp b/media/psh_utils/Android.bp
new file mode 100644
index 0000000..dafa63b
--- /dev/null
+++ b/media/psh_utils/Android.bp
@@ -0,0 +1,52 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+// libraries that are included whole_static for test apps
+ndk_libs = [
+    "android.hardware.health-V3-ndk",
+    "android.hardware.power.stats-V1-ndk",
+]
+
+// Power, System, Health utils
+cc_library {
+    name: "libpshutils",
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+    srcs: [
+        "AudioPowerManager.cpp",
+        "AudioToken.cpp",
+        "HealthStats.cpp",
+        "HealthStatsProvider.cpp",
+        "PowerClientStats.cpp",
+        "PowerStats.cpp",
+        "PowerStatsCollector.cpp",
+        "PowerStatsProvider.cpp",
+    ],
+    shared_libs: [
+        "com.android.media.audio-aconfig-cc",
+        "libaudioutils",
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
+    shared: {
+        shared_libs: ndk_libs,
+    },
+    static: {
+        whole_static_libs: ndk_libs,
+    },
+}
diff --git a/media/psh_utils/AudioPowerManager.cpp b/media/psh_utils/AudioPowerManager.cpp
new file mode 100644
index 0000000..3ae681a
--- /dev/null
+++ b/media/psh_utils/AudioPowerManager.cpp
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "AudioToken.h"
+#define LOG_TAG "AudioPowerManager"
+#include <com_android_media_audioserver.h>
+#include <cutils/properties.h>
+#include <utils/Log.h>
+#include <psh_utils/AudioPowerManager.h>
+
+namespace android::media::psh_utils {
+
+/* static */
+AudioPowerManager& AudioPowerManager::getAudioPowerManager() {
+    [[clang::no_destroy]] static AudioPowerManager apm;
+    return apm;
+}
+
+std::unique_ptr<Token> AudioPowerManager::startClient(pid_t pid, uid_t uid,
+        const std::string& additional) {
+    std::shared_ptr<PowerClientStats> powerClientStats;
+    std::lock_guard l(mMutex);
+    if (mPowerClientStats.count(uid) == 0) {
+        const auto it = mHistoricalClients.find(uid);
+        if (it == mHistoricalClients.end()) {
+            powerClientStats = std::make_shared<PowerClientStats>(uid, additional);
+        } else {
+            powerClientStats = it->second;
+            mHistoricalClients.erase(it);
+        }
+        mPowerClientStats[uid] = powerClientStats;
+    } else {
+        powerClientStats = mPowerClientStats[uid];
+    }
+    powerClientStats->addPid(pid);
+    mPidToUid[pid] = uid;
+    std::unique_ptr<Token> token =
+            std::make_unique<AudioClientToken>(powerClientStats, pid, uid, additional);
+    mOutstandingTokens.emplace(token.get());
+    return token;
+}
+
+std::unique_ptr<Token> AudioPowerManager::startTrack(uid_t uid, const std::string& additional) {
+    std::lock_guard l(mMutex);
+    if (mPowerClientStats.count(uid) == 0) {
+        ALOGW("%s: Cannot find uid: %d", __func__, uid);
+        return {};
+    }
+    auto powerClientStats = mPowerClientStats[uid];
+    std::unique_ptr<Token> token =
+            std::make_unique<AudioTrackToken>(powerClientStats, additional);
+    mOutstandingTokens.emplace(token.get());
+    return token;
+}
+
+std::unique_ptr<Token> AudioPowerManager::startThread(
+        pid_t pid, const std::string& wakeLockName,
+        WakeFlag wakeFlag, const std::string& additional) {
+    std::lock_guard l(mMutex);
+    std::unique_ptr<Token> token =
+            std::make_unique<AudioThreadToken>(pid, wakeLockName, wakeFlag, additional);
+    mOutstandingTokens.emplace(token.get());
+    return token;
+}
+
+std::string AudioPowerManager::toString() const {
+    const std::string prefix("  ");
+    std::string result;
+    std::lock_guard l(mMutex);
+    result.append("Power Tokens:\n");
+    std::vector<std::string> tokenInfo;
+    for (const auto& token: mOutstandingTokens) {
+        tokenInfo.emplace_back(token->toString());
+    }
+    std::sort(tokenInfo.begin(), tokenInfo.end());
+    for (const auto& info: tokenInfo) {
+        result.append(prefix).append(info).append("\n");
+    }
+    result.append("Power Clients:\n");
+    for (const auto& [uid, powerClientStats]: mPowerClientStats) {
+        result.append(powerClientStats->toString(true, prefix));
+    }
+    result.append("Power Client History:\n");
+    for (const auto& [power, powerClientStats]: mHistoricalClients) {
+        result.append(powerClientStats->toString(true, prefix));
+    }
+    return result;
+}
+
+void AudioPowerManager::stopClient(pid_t pid) {
+    std::lock_guard l(mMutex);
+    const auto pidit = mPidToUid.find(pid);
+    if (pidit == mPidToUid.end()) return;
+    const uid_t uid = pidit->second;
+    const auto it = mPowerClientStats.find(uid);
+    if (it == mPowerClientStats.end()) return;
+
+    auto powerClientStats = it->second;
+    size_t count = powerClientStats->removePid(pid);
+    if (count == 0) {
+        mHistoricalClients[uid] = powerClientStats;
+        mPowerClientStats.erase(it);
+        if (mHistoricalClients.size() > kHistory) {
+            mHistoricalClients.erase(mHistoricalClients.begin()); // remove oldest.
+        }
+    }
+    mPidToUid.erase(pid);
+}
+
+void AudioPowerManager::clear_token_ptr(Token* token) {
+    if (token != nullptr) {
+        std::lock_guard l(mMutex);
+        (void)mOutstandingTokens.erase(token);
+    }
+}
+
+/* static */
+bool AudioPowerManager::enabled() {
+    static const bool enabled = com::android::media::audioserver::power_stats()
+            && property_get_bool("persist.audio.power_stats.enabled", false);
+    return enabled;
+}
+
+} // namespace android::media::psh_utils
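The manager above hands out `std::unique_ptr<Token>` handles whose lifetime drives all bookkeeping: creating one registers the client, track, or thread, and destroying it removes the entry (and, for clients, moves the stats to the historical list once the last pid is gone). A hedged usage sketch; the calling function and the `additional` strings are invented for illustration:

```cpp
// RAII-style use of AudioPowerManager tokens.
#include <sys/types.h>

#include <psh_utils/AudioPowerManager.h>

void examplePowerAccounting(pid_t pid, uid_t uid) {
    using android::media::psh_utils::AudioPowerManager;
    if (!AudioPowerManager::enabled()) return;  // gated by the aconfig flag and sysprop

    auto& apm = AudioPowerManager::getAudioPowerManager();

    // Register the client: the uid gets (or re-activates) a PowerClientStats entry.
    auto clientToken = apm.startClient(pid, uid, /*additional=*/"example-client");

    // Start a track for the same uid; startTrack() returns null for unknown uids.
    auto trackToken = apm.startTrack(uid, /*additional=*/"example-track");

    // Scope exit destroys trackToken then clientToken: the track stops its accounting,
    // and the client moves to the historical list once its last pid is removed.
}
```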
diff --git a/media/psh_utils/AudioToken.cpp b/media/psh_utils/AudioToken.cpp
new file mode 100644
index 0000000..f7bf382
--- /dev/null
+++ b/media/psh_utils/AudioToken.cpp
@@ -0,0 +1,155 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "AudioToken"
+#include <android-base/logging.h>
+#include <utils/Log.h>
+#include "AudioToken.h"
+#include <psh_utils/AudioPowerManager.h>
+
+namespace android::media::psh_utils {
+
+/* static */
+constinit std::atomic<size_t> AudioClientToken::sIdCounter{};
+
+AudioClientToken::AudioClientToken(
+        std::shared_ptr<PowerClientStats> powerClientStats, pid_t pid, uid_t uid,
+        const std::string& additional)
+    : mPowerClientStats(std::move(powerClientStats))
+    , mPid(pid)
+    , mAdditional(additional)
+    , mId(sIdCounter.fetch_add(1, std::memory_order_relaxed)) {
+        (void)uid;
+}
+
+AudioClientToken::~AudioClientToken() {
+    auto& apm = AudioPowerManager::getAudioPowerManager();
+
+    // APM has a back pointer to AudioToken, which is accessible on toString().
+    // We first remove ourselves to prevent use after free.
+    apm.clear_token_ptr(this);
+
+    // The client token is released when the client is no longer registered with AudioFlinger.
+    // However, AudioTrackTokens may still be active when the client is released, e.g. after a
+    // crash while some of its tracks are still draining. Those track tokens hold their own
+    // shared_ptr to the PowerClientStats, which keeps the accounting consistent.
+
+    // Stopping the client moves its PowerClientStats from active to historical
+    // if it is the last pid associated with the client uid.
+    apm.stopClient(mPid);
+}
+
+std::string AudioClientToken::toString() const {
+    std::string result("Client-");
+    result.append(std::to_string(mId)).append(": ")
+            .append(" pid: ").append(std::to_string(mPid));
+    if (!mAdditional.empty()) {
+        result.append(" ").append(mAdditional);
+    }
+    return result;
+}
+
+std::unique_ptr<Token> createAudioClientToken(pid_t pid, uid_t uid,
+        const std::string& additional) {
+    return AudioPowerManager::getAudioPowerManager().startClient(pid, uid, additional);
+}
+
+/* static */
+constinit std::atomic<size_t> AudioThreadToken::sIdCounter{};
+
+AudioThreadToken::AudioThreadToken(
+        pid_t tid, const std::string& wakeLockName,
+        WakeFlag wakeFlag, const std::string& additional)
+    : mTid(tid)
+    , mWakeLockName(wakeLockName)
+    , mWakeFlag(wakeFlag)
+    , mAdditional(additional)
+    , mId(sIdCounter.fetch_add(1, std::memory_order_relaxed)) {
+}
+
+AudioThreadToken::~AudioThreadToken() {
+    auto& apm = AudioPowerManager::getAudioPowerManager();
+
+    // APM has a back pointer to AudioToken, which is accessible on toString().
+    // We first remove ourselves to prevent use after free.
+    apm.clear_token_ptr(this);
+}
+
+std::string AudioThreadToken::toString() const {
+    std::string result("Thread-");
+    result.append(std::to_string(mId)).append(": ")
+            .append(" ThreadBase-tid: ").append(std::to_string(mTid))
+            .append(" wakeLockName: ").append(mWakeLockName)
+            .append(" wakeFlag: ").append(::android::media::psh_utils::toString(mWakeFlag));
+    if (!mAdditional.empty()) {
+        result.append(" ").append(mAdditional);
+    }
+    return result;
+}
+
+std::unique_ptr<Token> createAudioThreadToken(
+        pid_t pid, const std::string& wakeLockName,
+        WakeFlag wakeFlag, const std::string& additional) {
+    return AudioPowerManager::getAudioPowerManager().startThread(
+            pid, wakeLockName, wakeFlag, additional);
+}
+
+/* static */
+constinit std::atomic<size_t> AudioTrackToken::sIdCounter{};
+
+AudioTrackToken::AudioTrackToken(
+        std::shared_ptr<PowerClientStats> powerClientStats, const std::string& additional)
+    : mPowerClientStats(std::move(powerClientStats))
+    , mAdditional(additional)
+    , mId(sIdCounter.fetch_add(1, std::memory_order_relaxed)) {
+        if (mPowerClientStats){
+            mPowerClientStats->getCommandThread().add(
+                    "start",
+                    [pas = mPowerClientStats, actualNs = systemTime(SYSTEM_TIME_BOOTTIME)]() {
+                        pas->start(actualNs);
+                    });
+        }
+}
+
+AudioTrackToken::~AudioTrackToken() {
+    // APM has a back pointer to AudioToken, which is accessible on toString().
+    // We first remove ourselves to prevent use after free.
+    AudioPowerManager::getAudioPowerManager().clear_token_ptr(this);
+    if (mPowerClientStats) {
+        mPowerClientStats->getCommandThread().add(
+                "stop",
+                [pas = mPowerClientStats, actualNs = systemTime(SYSTEM_TIME_BOOTTIME)]() {
+                    pas->stop(actualNs);
+                });
+    }
+}
+
+std::string AudioTrackToken::toString() const {
+    std::string result("Track-");
+    result.append(std::to_string(mId)).append(": ")
+            .append(mPowerClientStats ? mPowerClientStats->toString() : std::string("null"));
+    if (!mAdditional.empty()) {
+        result.append(" ").append(mAdditional);
+    }
+    return result;
+}
+
+std::unique_ptr<Token> createAudioTrackToken(uid_t uid, const std::string& additional) {
+    return AudioPowerManager::getAudioPowerManager().startTrack(uid, additional);
+}
+
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/AudioToken.h b/media/psh_utils/AudioToken.h
new file mode 100644
index 0000000..aa25b04
--- /dev/null
+++ b/media/psh_utils/AudioToken.h
@@ -0,0 +1,83 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <psh_utils/PowerClientStats.h>
+#include <psh_utils/Token.h>
+
+#include <atomic>
+#include <memory>
+#include <string>
+
+namespace android::media::psh_utils {
+
+class AudioClientToken : public Token {
+public:
+    AudioClientToken(std::shared_ptr<PowerClientStats> powerClientStats, pid_t pid, uid_t uid,
+             const std::string& additional);
+    ~AudioClientToken() override;
+
+    // AudioPowerManager may call toString() while AudioToken is in its dtor.
+    // It is safe so long as toString is final.
+    std::string toString() const final;
+
+private:
+    const std::shared_ptr<PowerClientStats> mPowerClientStats;
+    const pid_t mPid;
+    const std::string mAdditional;
+    const size_t mId;
+    static constinit std::atomic<size_t> sIdCounter;
+};
+
+class AudioThreadToken : public Token {
+public:
+    AudioThreadToken(
+            pid_t tid, const std::string& wakeLockName,
+            WakeFlag wakeFlag, const std::string& additional);
+    ~AudioThreadToken() override;
+
+    // AudioPowerManager may call toString() while AudioToken is in its dtor.
+    // It is safe so long as toString is final.
+    std::string toString() const final;
+
+private:
+    const pid_t mTid;
+    const std::string mWakeLockName;
+    const WakeFlag mWakeFlag;
+    const std::string mAdditional;
+    const size_t mId;
+    static constinit std::atomic<size_t> sIdCounter;
+};
+
+class AudioTrackToken : public Token {
+public:
+    AudioTrackToken(
+            std::shared_ptr<PowerClientStats> powerClientStats, const std::string& additional);
+    ~AudioTrackToken() override;
+
+    // AudioPowerManager may call toString() while AudioToken is in its dtor.
+    // It is safe so long as toString is final.
+    std::string toString() const final;
+
+private:
+    const std::shared_ptr<PowerClientStats> mPowerClientStats;
+    const std::string mAdditional;
+    const size_t mId;
+    static constinit std::atomic<size_t> sIdCounter;
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/HealthStats.cpp b/media/psh_utils/HealthStats.cpp
new file mode 100644
index 0000000..5e767f6
--- /dev/null
+++ b/media/psh_utils/HealthStats.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <psh_utils/HealthStats.h>
+
+namespace android::media::psh_utils {
+
+template <typename T>
+const T& choose_voltage(const T& a, const T& b) {
+   return std::max(a, b);  // we use max here, could use avg.
+}
+
+std::string HealthStats::toString() const {
+    std::string result;
+    const float batteryVoltage = batteryVoltageMillivolts * 1e-3f;  // Volts
+    const float charge = batteryChargeCounterUah * (3600 * 1e-6);  // Joules = Amp-Second
+    result.append("{Net Battery V: ")
+            .append(std::to_string(batteryVoltage))
+            .append(" J: ")
+            .append(std::to_string(charge))
+            .append("}");
+    return result;
+}
+
+std::string HealthStats::normalizedEnergy(double timeSec) const {
+    std::string result;
+    const float batteryVoltage = batteryVoltageMillivolts * 1e-3f;   // Volts
+    const float charge = -batteryChargeCounterUah * (3600 * 1e-6f);  // Joules = Amp-Second
+    const float watts = charge * batteryVoltage / timeSec;
+    result.append("{Net Battery V: ")
+            .append(std::to_string(batteryVoltage))
+            .append(" J: ")
+            .append(std::to_string(charge))
+            .append(" W: ")
+            .append(std::to_string(watts))
+            .append("}");
+    return result;
+}
+
+HealthStats HealthStats::operator+=(const HealthStats& other) {
+    batteryVoltageMillivolts = choose_voltage(
+            batteryVoltageMillivolts, other.batteryVoltageMillivolts);
+    batteryFullChargeUah = std::max(batteryFullChargeUah, other.batteryFullChargeUah);
+    batteryChargeCounterUah += other.batteryChargeCounterUah;
+    return *this;
+}
+
+HealthStats HealthStats::operator-=(const HealthStats& other) {
+    batteryVoltageMillivolts = choose_voltage(
+            batteryVoltageMillivolts, other.batteryVoltageMillivolts);
+    batteryFullChargeUah = std::max(batteryFullChargeUah, other.batteryFullChargeUah);
+    batteryChargeCounterUah -= other.batteryChargeCounterUah;
+    return *this;
+}
+
+HealthStats HealthStats::operator+(const HealthStats& other) const {
+    HealthStats result = *this;
+    result += other;
+    return result;
+}
+
+HealthStats HealthStats::operator-(const HealthStats& other) const {
+    HealthStats result = *this;
+    result -= other;
+    return result;
+}
+
+} // namespace android::media::psh_utils
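`normalizedEnergy()` above folds three unit conversions into two lines: millivolts to volts, microamp-hours to coulombs (amp-seconds), and charge times voltage over elapsed time to average watts. A worked example with made-up numbers:

```cpp
// Worked example of the normalizedEnergy() arithmetic (illustrative values only).
#include <cstdio>

int main() {
    const double voltageMillivolts = 3800.0;        // 3.8 V battery
    const double chargeCounterDeltaUah = -50000.0;  // counter dropped by 50 mAh over the window
    const double windowSec = 3600.0;                // one hour of measurement

    const double volts = voltageMillivolts * 1e-3;             // 3.8 V
    const double coulombs = -chargeCounterDeltaUah * 3600e-6;  // 180 A*s drawn from the battery
    const double joules = coulombs * volts;                    // 684 J
    const double watts = joules / windowSec;                   // 0.19 W average drain

    std::printf("drained %.0f C = %.0f J, average %.2f W\n", coulombs, joules, watts);
    return 0;
}
```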
diff --git a/media/psh_utils/HealthStatsProvider.cpp b/media/psh_utils/HealthStatsProvider.cpp
new file mode 100644
index 0000000..de72463
--- /dev/null
+++ b/media/psh_utils/HealthStatsProvider.cpp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PowerStatsProvider.h"
+#include <aidl/android/hardware/health/IHealth.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <psh_utils/ServiceSingleton.h>
+
+using ::aidl::android::hardware::health::HealthInfo;
+using ::aidl::android::hardware::health::IHealth;
+
+namespace android::media::psh_utils {
+
+static auto getHealthService() {
+    return getServiceSingleton<IHealth>();
+}
+
+status_t HealthStatsDataProvider::fill(PowerStats* stat) const {
+    if (stat == nullptr) return BAD_VALUE;
+    HealthStats& stats = stat->health_stats;
+    auto healthService = getHealthService();
+    if (healthService == nullptr) {
+        return NO_INIT;
+    }
+    HealthInfo healthInfo;
+    if (!healthService->getHealthInfo(&healthInfo).isOk()) {
+        LOG(ERROR) << __func__ << ": unable to get health info";
+        return INVALID_OPERATION;
+    }
+
+    stats.batteryVoltageMillivolts = healthInfo.batteryVoltageMillivolts;
+    stats.batteryFullChargeUah = healthInfo.batteryFullChargeUah;
+    stats.batteryChargeCounterUah = healthInfo.batteryChargeCounterUah;
+    return NO_ERROR;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerClientStats.cpp b/media/psh_utils/PowerClientStats.cpp
new file mode 100644
index 0000000..65f65a44
--- /dev/null
+++ b/media/psh_utils/PowerClientStats.cpp
@@ -0,0 +1,101 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <psh_utils/PowerClientStats.h>
+#include <mediautils/ServiceUtilities.h>
+
+namespace android::media::psh_utils {
+
+/* static */
+audio_utils::CommandThread& PowerClientStats::getCommandThread() {
+    [[clang::no_destroy]] static audio_utils::CommandThread ct;
+    return ct;
+}
+
+PowerClientStats::PowerClientStats(uid_t uid, const std::string& additional)
+        : mUid(uid), mAdditional(additional) {}
+
+void PowerClientStats::start(int64_t actualNs) {
+    std::lock_guard l(mMutex);
+    ++mTokenCount;
+    if (mStartNs == 0) mStartNs = actualNs;
+    if (mStartStats) return;
+    mStartStats = PowerStatsCollector::getCollector().getStats(kStatTimeToleranceNs);
+}
+
+void PowerClientStats::stop(int64_t actualNs) {
+    std::lock_guard l(mMutex);
+    if (--mTokenCount > 0) return;
+    if (mStartNs != 0) mCumulativeNs += actualNs - mStartNs;
+    mStartNs = 0;
+    if (!mStartStats) return;
+    const auto stopStats = PowerStatsCollector::getCollector().getStats(kStatTimeToleranceNs);
+    if (stopStats && stopStats != mStartStats) {
+        *mCumulativeStats += *stopStats - *mStartStats;
+    }
+    mStartStats.reset();
+}
+
+void PowerClientStats::addPid(pid_t pid) {
+    std::lock_guard l(mMutex);
+    mPids.emplace(pid);
+}
+
+size_t PowerClientStats::removePid(pid_t pid) {
+    std::lock_guard l(mMutex);
+    mPids.erase(pid);
+    return mPids.size();
+}
+
+std::string PowerClientStats::toString(bool stats, const std::string& prefix) const {
+    std::lock_guard l(mMutex);
+
+    // Adjust delta time and stats if currently running.
+    auto cumulativeStats = mCumulativeStats;
+    auto cumulativeNs = mCumulativeNs;
+    if (mStartNs) cumulativeNs += systemTime(SYSTEM_TIME_BOOTTIME) - mStartNs;
+    if (mStartStats) {
+        const auto stopStats = PowerStatsCollector::getCollector().getStats(kStatTimeToleranceNs);
+        if (stopStats && stopStats != mStartStats) {
+            auto newStats = std::make_shared<PowerStats>(*cumulativeStats);
+            *newStats += *stopStats - *mStartStats;
+            cumulativeStats = newStats;
+        }
+    }
+
+    std::string result(prefix);
+    result.append("uid: ")
+            .append(std::to_string(mUid))
+            .append(" ").append(mediautils::UidInfo::getInfo(mUid)->package)
+            .append(" streams: ").append(std::to_string(mTokenCount))
+            .append(" seconds: ").append(std::to_string(cumulativeNs * 1e-9));
+    result.append(" {");
+    for (auto pid : mPids) {
+        result.append(" ").append(std::to_string(pid));
+    }
+    result.append(" }");
+    if (!mAdditional.empty()) {
+        result.append("\n").append(prefix).append(mAdditional);
+    }
+    if (stats) {
+        std::string prefix2(prefix);
+        prefix2.append("  ");
+        result.append("\n").append(cumulativeStats->normalizedEnergy(prefix2));
+    }
+    return result;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStats.cpp b/media/psh_utils/PowerStats.cpp
new file mode 100644
index 0000000..f8f87c5
--- /dev/null
+++ b/media/psh_utils/PowerStats.cpp
@@ -0,0 +1,283 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <audio_utils/clock.h>
+#include <psh_utils/PowerStats.h>
+
+namespace android::media::psh_utils {
+
+// Determine the best start time from a and b, which is
+// min(a, b) if both exist, otherwise the one that exists.
+template <typename T>
+const T& choose_best_start_time(const T& a, const T& b) {
+    if (a) {
+        return b ? std::min(a, b) : a;
+    } else {
+        return b;
+    }
+}
+
+// subtract two time differences.
+template <typename T, typename U>
+const T sub_time_diff(const T& diff_a, const T& diff_b, const U& abs_c, const U& abs_d) {
+    if (diff_a) {
+        return diff_b ? (diff_a - diff_b) : diff_a;
+    } else if (diff_b) {
+        return diff_b;
+    } else {  // no difference exists, use absolute time.
+        return abs_c - abs_d;
+    }
+}
+
+std::string PowerStats::Metadata::toString() const {
+    return std::string("start_time_since_boot_ms: ").append(
+                    std::to_string(start_time_since_boot_ms))
+            .append(" start_time_monotonic_ms: ").append(std::to_string(start_time_monotonic_ms))
+            .append(audio_utils_time_string_from_ns(start_time_epoch_ms * 1'000'000).time)
+            .append(" duration_ms: ").append(std::to_string(duration_ms))
+            .append(" duration_monotonic_ms: ").append(std::to_string(duration_monotonic_ms));
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator+=(const Metadata& other) {
+    start_time_since_boot_ms = choose_best_start_time(
+            start_time_since_boot_ms, other.start_time_since_boot_ms);
+    start_time_epoch_ms = choose_best_start_time(
+            start_time_epoch_ms, other.start_time_epoch_ms);
+    start_time_monotonic_ms = choose_best_start_time(
+            start_time_monotonic_ms, other.start_time_monotonic_ms);
+    duration_ms += other.duration_ms;
+    duration_monotonic_ms += other.duration_monotonic_ms;
+    return *this;
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator-=(const Metadata& other) {
+    // here we calculate duration, if it makes sense.
+    duration_ms = sub_time_diff(duration_ms, other.duration_ms,
+                                start_time_since_boot_ms, other.start_time_since_boot_ms);
+    duration_monotonic_ms = sub_time_diff(
+            duration_monotonic_ms, other.duration_monotonic_ms,
+            start_time_monotonic_ms, other.start_time_monotonic_ms);
+    start_time_since_boot_ms = choose_best_start_time(
+            start_time_since_boot_ms, other.start_time_since_boot_ms);
+    start_time_epoch_ms = choose_best_start_time(
+            start_time_epoch_ms, other.start_time_epoch_ms);
+    start_time_monotonic_ms = choose_best_start_time(
+            start_time_monotonic_ms, other.start_time_monotonic_ms);
+    return *this;
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator+(const Metadata& other) const {
+    Metadata result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator-(const Metadata& other) const {
+    Metadata result = *this;
+    result -= other;
+    return result;
+}
+
+std::string PowerStats::StateResidency::toString() const {
+    return std::string(entity_name).append(state_name)
+            .append(" ").append(std::to_string(time_ms))
+            .append(" ").append(std::to_string(entry_count));
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator+=(const StateResidency& other) {
+    if (entity_name.empty()) entity_name = other.entity_name;
+    if (state_name.empty()) state_name = other.state_name;
+    time_ms += other.time_ms;
+    entry_count += other.entry_count;
+    return *this;
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator-=(const StateResidency& other) {
+    if (entity_name.empty()) entity_name = other.entity_name;
+    if (state_name.empty()) state_name = other.state_name;
+    time_ms -= other.time_ms;
+    entry_count -= other.entry_count;
+    return *this;
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator+(
+        const StateResidency& other) const {
+    StateResidency result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator-(
+        const StateResidency& other) const {
+    StateResidency result = *this;
+    result -= other;
+    return result;
+}
+
+std::string PowerStats::RailEnergy::toString() const {
+    return std::string(subsystem_name)
+            .append(rail_name)
+            .append(" ")
+            .append(std::to_string(energy_uws));
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator+=(const RailEnergy& other) {
+    if (subsystem_name.empty()) subsystem_name = other.subsystem_name;
+    if (rail_name.empty()) rail_name = other.rail_name;
+    energy_uws += other.energy_uws;
+    return *this;
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator-=(const RailEnergy& other) {
+    if (subsystem_name.empty()) subsystem_name = other.subsystem_name;
+    if (rail_name.empty()) rail_name = other.rail_name;
+    energy_uws -= other.energy_uws;
+    return *this;
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator+(const RailEnergy& other) const {
+    RailEnergy result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator-(const RailEnergy& other) const {
+    RailEnergy result = *this;
+    result -= other;
+    return result;
+}
+
+std::string PowerStats::toString(const std::string& prefix) const {
+    std::string result;
+    result.append(prefix).append(metadata.toString()).append("\n");
+    result.append(prefix).append(health_stats.toString()).append("\n");
+    for (const auto &residency: power_entity_state_residency) {
+        result.append(prefix).append(residency.toString()).append("\n");
+    }
+    for (const auto &energy: rail_energy) {
+        result.append(prefix).append(energy.toString()).append("\n");
+    }
+    return result;
+}
+
+std::string PowerStats::normalizedEnergy(const std::string& prefix) const {
+    if (metadata.duration_ms == 0) return {};
+
+    std::string result(prefix);
+    result.append(audio_utils_time_string_from_ns(
+            metadata.start_time_epoch_ms * 1'000'000).time);
+    result.append(" duration_boottime: ")
+            .append(std::to_string(metadata.duration_ms * 1e-3f))
+            .append(" duration_monotonic: ")
+            .append(std::to_string(metadata.duration_monotonic_ms * 1e-3f))
+            .append("\n");
+    if (health_stats.isValid()) {
+        result.append(prefix)
+                .append(health_stats.normalizedEnergy(metadata.duration_ms * 1e-3f)).append("\n");
+    }
+
+    // energy_uws is converted to ave W using recip time in us.
+    const float recipTime = 1e-3 / metadata.duration_ms;
+    int64_t total_energy = 0;
+    for (const auto& energy: rail_energy) {
+        total_energy += energy.energy_uws;
+        result.append(prefix).append(energy.subsystem_name)
+                .append(energy.rail_name)
+                .append(" ")
+                .append(std::to_string(energy.energy_uws * 1e-6))
+                .append(" ")
+                .append(std::to_string(energy.energy_uws * recipTime))
+                .append("\n");
+    }
+    if (total_energy != 0) {
+        result.append(prefix).append("total J and ave W: ")
+                .append(std::to_string(total_energy * 1e-6))
+                .append(" ")
+                .append(std::to_string(total_energy * recipTime))
+                .append("\n");
+    }
+    return result;
+}
+
+// seconds, joules, watts
+std::tuple<float, float, float> PowerStats::energyFrom(const std::string& railMatcher) const {
+    if (metadata.duration_ms == 0) return {};
+
+    // energy_uws is converted to ave W using recip time in us.
+    const float recipTime = 1e-3 / metadata.duration_ms;
+    int64_t total_energy = 0;
+    for (const auto& energy: rail_energy) {
+        if (energy.subsystem_name.find(railMatcher) != std::string::npos
+                || energy.rail_name.find(railMatcher) != std::string::npos) {
+            total_energy += energy.energy_uws;
+        }
+    }
+    return {metadata.duration_ms * 1e-3, total_energy * 1e-6, total_energy * recipTime};
+}
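+// Illustrative use of energyFrom() (this mirrors PerformanceFixture::TearDown):
+//   PowerStats diff = *stopStats - *startStats;              // two snapshots around a workload
+//   auto [seconds, joules, watts] = diff.energyFrom("CPU");  // rails whose name contains "CPU"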
+
+PowerStats PowerStats::operator+=(const PowerStats& other) {
+    metadata += other.metadata;
+    health_stats += other.health_stats;
+    if (power_entity_state_residency.empty()) {
+        power_entity_state_residency = other.power_entity_state_residency;
+    } else {
+        for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
+            power_entity_state_residency[i] += other.power_entity_state_residency[i];
+        }
+    }
+    if (rail_energy.empty()) {
+        rail_energy = other.rail_energy;
+    } else {
+        for (size_t i = 0; i < rail_energy.size(); ++i) {
+            rail_energy[i] += other.rail_energy[i];
+        }
+    }
+    return *this;
+}
+
+PowerStats PowerStats::operator-=(const PowerStats& other) {
+    metadata -= other.metadata;
+    health_stats -= other.health_stats;
+    if (power_entity_state_residency.empty()) {
+        power_entity_state_residency = other.power_entity_state_residency;
+    } else {
+        for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
+            power_entity_state_residency[i] -= other.power_entity_state_residency[i];
+        }
+    }
+    if (rail_energy.empty()) {
+        rail_energy = other.rail_energy;
+    } else {
+        for (size_t i = 0; i < rail_energy.size(); ++i) {
+            rail_energy[i] -= other.rail_energy[i];
+        }
+    }
+    return *this;
+}
+
+PowerStats PowerStats::operator+(const PowerStats& other) const {
+    PowerStats result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats PowerStats::operator-(const PowerStats& other) const {
+    PowerStats result = *this;
+    result -= other;
+    return result;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStatsCollector.cpp b/media/psh_utils/PowerStatsCollector.cpp
new file mode 100644
index 0000000..e5bf2aa
--- /dev/null
+++ b/media/psh_utils/PowerStatsCollector.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <psh_utils/PowerStatsCollector.h>
+#include "PowerStatsProvider.h"
+#include <utils/Timers.h>
+
+namespace android::media::psh_utils {
+
+PowerStatsCollector::PowerStatsCollector() {
+    addProvider(std::make_unique<PowerEntityResidencyDataProvider>());
+    addProvider(std::make_unique<RailEnergyDataProvider>());
+    addProvider(std::make_unique<HealthStatsDataProvider>());
+}
+
+/* static */
+PowerStatsCollector& PowerStatsCollector::getCollector() {
+    [[clang::no_destroy]] static PowerStatsCollector psc;
+    return psc;
+}
+
+std::shared_ptr<const PowerStats> PowerStatsCollector::getStats(int64_t toleranceNs) {
+    // Check if there is a cached PowerStats result available.
+    // As toleranceNs may be different between callers, it may be that some callers
+    // are blocked on mMutexExclusiveFill for a new stats result, while other callers
+    // may find the current cached result acceptable (within toleranceNs).
+    if (toleranceNs > 0) {
+        auto result = checkLastStats(toleranceNs);
+        if (result) return result;
+    }
+
+    // Take the mMutexExclusiveFill to ensure only one thread is filling.
+    std::lock_guard lg1(mMutexExclusiveFill);
+    // As obtaining a new PowerStats snapshot might take some time,
+    // check again to see if another waiting thread filled the cached result for us.
+    if (toleranceNs > 0) {
+        auto result = checkLastStats(toleranceNs);
+        if (result) return result;
+    }
+    auto result = std::make_shared<PowerStats>();
+    (void)fill(result.get());
+    std::lock_guard lg2(mMutex);
+    mLastFetchNs = systemTime(SYSTEM_TIME_BOOTTIME);
+    mLastFetchStats = result;
+    return result;
+}
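+// Illustrative call pattern (see audio_powerstatscollector_benchmark.cpp):
+//   auto& collector = PowerStatsCollector::getCollector();
+//   // Reuse a cached snapshot if it is at most 50 ms old, otherwise take a fresh one.
+//   std::shared_ptr<const PowerStats> stats = collector.getStats(50'000'000 /* toleranceNs */);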
+
+std::shared_ptr<const PowerStats> PowerStatsCollector::checkLastStats(int64_t toleranceNs) const {
+    if (toleranceNs > 0) {
+        // see if we can return an old result.
+        std::lock_guard lg(mMutex);
+        if (mLastFetchStats && systemTime(SYSTEM_TIME_BOOTTIME) - mLastFetchNs < toleranceNs) {
+            return mLastFetchStats;
+        }
+    }
+    return {};
+}
+
+void PowerStatsCollector::addProvider(std::unique_ptr<PowerStatsProvider>&& powerStatsProvider) {
+    mPowerStatsProviders.emplace_back(std::move(powerStatsProvider));
+}
+
+int PowerStatsCollector::fill(PowerStats* stats) const {
+    if (!stats) {
+        LOG(ERROR) << __func__ << ": bad args; stats is null";
+        return 1;
+    }
+
+    for (const auto& provider : mPowerStatsProviders) {
+        (void) provider->fill(stats); // on error, we continue to proceed.
+    }
+
+    // boot time advances like wall clock time (including time suspended), but starts from boot.
+    stats->metadata.start_time_since_boot_ms = systemTime(SYSTEM_TIME_BOOTTIME) / 1'000'000;
+
+    // wall clock time
+    stats->metadata.start_time_epoch_ms = systemTime(SYSTEM_TIME_REALTIME) / 1'000'000;
+
+    // monotonic time follows boot time, but does not include any time suspended.
+    stats->metadata.start_time_monotonic_ms = systemTime() / 1'000'000;
+    return 0;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStatsProvider.cpp b/media/psh_utils/PowerStatsProvider.cpp
new file mode 100644
index 0000000..112c323
--- /dev/null
+++ b/media/psh_utils/PowerStatsProvider.cpp
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PowerStatsProvider.h"
+#include <aidl/android/hardware/power/stats/IPowerStats.h>
+#include <android-base/logging.h>
+#include <psh_utils/ServiceSingleton.h>
+#include <unordered_map>
+
+using ::aidl::android::hardware::power::stats::IPowerStats;
+
+namespace android::media::psh_utils {
+
+static auto getPowerStatsService() {
+    return getServiceSingleton<IPowerStats>();
+}
+
+status_t RailEnergyDataProvider::fill(PowerStats *stat) const {
+    if (stat == nullptr) return BAD_VALUE;
+    auto powerStatsService = getPowerStatsService();
+    if (powerStatsService == nullptr) {
+        return NO_INIT;
+    }
+
+    std::unordered_map<int32_t, ::aidl::android::hardware::power::stats::Channel> channelMap;
+    {
+        std::vector<::aidl::android::hardware::power::stats::Channel> channels;
+        if (!powerStatsService->getEnergyMeterInfo(&channels).isOk()) {
+            LOG(ERROR) << "unable to get energy meter info";
+            return INVALID_OPERATION;
+        }
+        for (auto& channel : channels) {
+            channelMap.emplace(channel.id, std::move(channel));
+        }
+    }
+
+    std::vector<::aidl::android::hardware::power::stats::EnergyMeasurement> measurements;
+    if (!powerStatsService->readEnergyMeter({}, &measurements).isOk()) {
+        LOG(ERROR) << "unable to get energy measurements";
+        return INVALID_OPERATION;
+    }
+
+    for (const auto& measurement : measurements) {
+        stat->rail_energy.emplace_back(
+            channelMap.at(measurement.id).subsystem,
+            channelMap.at(measurement.id).name,
+            measurement.energyUWs);
+    }
+
+    // Sort entries first by subsystem_name, then by rail_name.
+    // Sorting is needed to make interval processing efficient.
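+    // (PowerStats::operator+= and operator-= combine rail entries by index, so both
+    // snapshots must list their rails in the same deterministic order.)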
+    std::sort(stat->rail_energy.begin(), stat->rail_energy.end(),
+              [](const auto& a, const auto& b) {
+                  if (a.subsystem_name != b.subsystem_name) {
+                      return a.subsystem_name < b.subsystem_name;
+                  }
+                  return a.rail_name < b.rail_name;
+              });
+
+    return NO_ERROR;
+}
+
+status_t PowerEntityResidencyDataProvider::fill(PowerStats* stat) const {
+    if (stat == nullptr) return BAD_VALUE;
+    auto powerStatsService = getPowerStatsService();
+    if (powerStatsService == nullptr) {
+        return NO_INIT;
+    }
+
+    // these are based on entityId
+    std::unordered_map<int32_t, std::string> entityNames;
+    std::unordered_map<int32_t, std::unordered_map<int32_t, std::string>> stateNames;
+    std::vector<int32_t> powerEntityIds; // ids to use
+
+    {
+        std::vector<::aidl::android::hardware::power::stats::PowerEntity> entities;
+        if (!powerStatsService->getPowerEntityInfo(&entities).isOk()) {
+            LOG(ERROR) << __func__ << ": unable to get entity info";
+            return INVALID_OPERATION;
+        }
+
+        std::vector<std::string> powerEntityNames;
+        for (auto& entity : entities) {  // non-const so entity.name can be moved below
+            std::unordered_map<int32_t, std::string> states;
+            for (const auto& state : entity.states) {
+                states.emplace(state.id, state.name);
+            }
+
+            if (std::find(powerEntityNames.begin(), powerEntityNames.end(), entity.name) !=
+                powerEntityNames.end()) {
+                powerEntityIds.emplace_back(entity.id);
+            }
+            entityNames.emplace(entity.id, std::move(entity.name));
+            stateNames.emplace(entity.id, std::move(states));
+        }
+    }
+
+    std::vector<::aidl::android::hardware::power::stats::StateResidencyResult> results;
+    if (!powerStatsService->getStateResidency(powerEntityIds, &results).isOk()) {
+        LOG(ERROR) << __func__ << ": Unable to get state residency";
+        return INVALID_OPERATION;
+    }
+
+    for (const auto& result : results) {
+        for (const auto& curStateResidency : result.stateResidencyData) {
+            stat->power_entity_state_residency.emplace_back(
+                entityNames.at(result.id),
+                stateNames.at(result.id).at(curStateResidency.id),
+                static_cast<uint64_t>(curStateResidency.totalTimeInStateMs),
+                static_cast<uint64_t>(curStateResidency.totalStateEntryCount));
+        }
+    }
+
+    // Sort entries first by entity_name, then by state_name.
+    // Sorting is needed to make interval processing efficient.
+    std::sort(stat->power_entity_state_residency.begin(),
+              stat->power_entity_state_residency.end(),
+              [](const auto& a, const auto& b) {
+                  if (a.entity_name != b.entity_name) {
+                      return a.entity_name < b.entity_name;
+                  }
+                  return a.state_name < b.state_name;
+              });
+    return NO_ERROR;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStatsProvider.h b/media/psh_utils/PowerStatsProvider.h
new file mode 100644
index 0000000..c3888ac
--- /dev/null
+++ b/media/psh_utils/PowerStatsProvider.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <psh_utils/PowerStatsCollector.h>
+
+namespace android::media::psh_utils {
+
+class RailEnergyDataProvider : public PowerStatsProvider {
+public:
+    status_t fill(PowerStats* stat) const override;
+};
+
+class PowerEntityResidencyDataProvider : public PowerStatsProvider {
+public:
+    status_t fill(PowerStats* stat) const override;
+};
+
+class HealthStatsDataProvider : public PowerStatsProvider {
+public:
+    status_t fill(PowerStats* stat) const override;
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/benchmarks/Android.bp b/media/psh_utils/benchmarks/Android.bp
new file mode 100644
index 0000000..2382c69
--- /dev/null
+++ b/media/psh_utils/benchmarks/Android.bp
@@ -0,0 +1,75 @@
+package {
+    default_team: "trendy_team_android_media_audio_framework",
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_benchmark {
+    name: "audio_powerstats_benchmark",
+
+    srcs: ["audio_powerstats_benchmark.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    static_libs: [
+        "libpshutils",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+}
+
+cc_benchmark {
+    name: "audio_powerstatscollector_benchmark",
+
+    srcs: ["audio_powerstatscollector_benchmark.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    static_libs: [
+        "libpshutils",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+}
+
+cc_benchmark {
+    name: "audio_token_benchmark",
+
+    srcs: ["audio_token_benchmark.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    static_libs: [
+        "libpshutils",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libmediautils",
+        "libutils",
+    ],
+}
diff --git a/media/psh_utils/benchmarks/audio_powerstats_benchmark.cpp b/media/psh_utils/benchmarks/audio_powerstats_benchmark.cpp
new file mode 100644
index 0000000..4d8b224
--- /dev/null
+++ b/media/psh_utils/benchmarks/audio_powerstats_benchmark.cpp
@@ -0,0 +1,273 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "audio_powerstat_benchmark"
+#include <cutils/properties.h>
+#include <utils/Log.h>
+
+#include <psh_utils/PerformanceFixture.h>
+
+#include <algorithm>
+#include <android-base/strings.h>
+#include <random>
+#include <thread>
+#include <vector>
+
+/*
+Pixel 9 Pro XL
+---------------------------------------------------------------------------------------------------------------
+Benchmark                                               Time                        CPU              Iteration
+---------------------------------------------------------------------------------------------------------------
+audio_powerstats_benchmark:
+  #MemoryFixture/CacheAccess/64/0/0/1             5.195761589711465 ns       5.183635029038574 ns    135160912
+  #MemoryFixture/CacheAccess/128/0/0/1            10.37270431027728 ns      10.341754343667125 ns     67574354
+  #MemoryFixture/CacheAccess/256/0/0/1           20.767353363364098 ns      20.708496782017836 ns     33809541
+  #MemoryFixture/CacheAccess/512/0/0/1            41.53473855852046 ns       41.45724926375999 ns     16900399
+  #MemoryFixture/CacheAccess/1024/0/0/1           82.89673650172568 ns       82.68064919937272 ns      8462177
+  #MemoryFixture/CacheAccess/2048/0/0/1          165.77648929323732 ns      165.45127650324827 ns      4227878
+  #MemoryFixture/CacheAccess/4096/0/0/1           331.9272979248067 ns       331.0722959129879 ns      2114919
+  #MemoryFixture/CacheAccess/8192/0/0/1           663.8090302013887 ns       662.2813002594532 ns      1054528
+  #MemoryFixture/CacheAccess/16384/0/0/1         1327.4224893455748 ns      1324.0114138292752 ns       529095
+  #MemoryFixture/CacheAccess/32768/0/0/1         2657.1037276954685 ns      2651.8974883509522 ns       263970
+  #MemoryFixture/CacheAccess/65536/0/0/1          5314.170125835522 ns        5305.20127734871 ns       131679
+  #MemoryFixture/CacheAccess/131072/0/0/1        10624.517848490625 ns      10602.467739493763 ns        66056
+  #MemoryFixture/CacheAccess/262144/0/0/1         21271.09560700047 ns      21224.851075464823 ns        32916
+  #MemoryFixture/CacheAccess/524288/0/0/1         42556.76641626909 ns       42444.65628786041 ns        16508
+  #MemoryFixture/CacheAccess/1048576/0/0/1         85440.6313100312 ns       85076.15703685701 ns         8221
+  #MemoryFixture/CacheAccess/2097152/0/0/1       170908.37391089948 ns      169402.58059051324 ns         4132
+  #MemoryFixture/CacheAccess/4194304/0/0/1        373635.4350000207 ns      372955.40777777723 ns         1800
+  #MemoryFixture/CacheAccess/8388608/0/0/1        685101.2127660838 ns       681594.2330754338 ns         1034
+  #MemoryFixture/CacheAccess/16777216/0/0/1       1588009.158696047 ns      1581510.0217391246 ns          460
+  #MemoryFixture/CacheAccess/33554432/0/0/1      2721626.5387591715 ns       2708149.166666674 ns          258
+  #MemoryFixture/CacheAccess/67108864/0/0/1       5433705.728680161 ns       5413515.914728661 ns          129
+  #MemoryFixture/CacheAccess/64/1/0/1             5.201213751848433 ns       5.180967321755995 ns    135673202
+  #MemoryFixture/CacheAccess/128/1/0/1           10.386209252693448 ns      10.351378045484042 ns     67486234
+  #MemoryFixture/CacheAccess/256/1/0/1           20.742666405371747 ns      20.686210353062208 ns     33848169
+  #MemoryFixture/CacheAccess/512/1/0/1            41.52781367071582 ns        41.4438085044977 ns     16870391
+  #MemoryFixture/CacheAccess/1024/1/0/1           83.03849985460687 ns        82.6732197351271 ns      8442915
+  #MemoryFixture/CacheAccess/2048/1/0/1          166.02706801365886 ns      165.60695561393828 ns      4226169
+  #MemoryFixture/CacheAccess/4096/1/0/1          332.05696890075365 ns       331.3395040136246 ns      2112679
+  #MemoryFixture/CacheAccess/8192/1/0/1           664.3009073119205 ns       662.5811781316487 ns      1055315
+  #MemoryFixture/CacheAccess/16384/1/0/1         1329.0792867154223 ns      1325.0185471435798 ns       527251
+  #MemoryFixture/CacheAccess/32768/1/0/1         2652.9089904482526 ns      2645.5388137876826 ns       264236
+  #MemoryFixture/CacheAccess/65536/1/0/1          5312.635002724743 ns       5300.412875575496 ns       132064
+  #MemoryFixture/CacheAccess/131072/1/0/1        10625.299202810635 ns      10594.376178351697 ns        65982
+  #MemoryFixture/CacheAccess/262144/1/0/1        21270.763359464152 ns      21206.192921372138 ns        33029
+  #MemoryFixture/CacheAccess/524288/1/0/1         42496.14168177758 ns      42381.692498487435 ns        16530
+  #MemoryFixture/CacheAccess/1048576/1/0/1        85425.34302253627 ns       85063.54206182965 ns         8119
+  #MemoryFixture/CacheAccess/2097152/1/0/1        170961.8011639407 ns      169732.18840931158 ns         4124
+  #MemoryFixture/CacheAccess/4194304/1/0/1       440086.77439029363 ns      439010.07195122127 ns         1640
+  #MemoryFixture/CacheAccess/8388608/1/0/1        677684.5246376056 ns        675888.058937202 ns         1035
+  #MemoryFixture/CacheAccess/16777216/1/0/1      1571417.6297115851 ns      1566423.4922394755 ns          451
+  #MemoryFixture/CacheAccess/33554432/1/0/1       2723418.325581582 ns      2708535.8062015465 ns          258
+  #MemoryFixture/CacheAccess/67108864/1/0/1       5435209.372094963 ns       5413991.821705426 ns          129
+  #MemoryFixture/CacheAccess/64/2/0/1             5.204700890931938 ns       5.183036658664568 ns    135406460
+  #MemoryFixture/CacheAccess/128/2/0/1           10.376355078178406 ns      10.348754235460566 ns     67518337
+  #MemoryFixture/CacheAccess/256/2/0/1           20.726238269323797 ns      20.670377070062745 ns     33834057
+  #MemoryFixture/CacheAccess/512/2/0/1            41.49336362336435 ns       41.38084547087122 ns     16890919
+  #MemoryFixture/CacheAccess/1024/2/0/1           82.96761236822086 ns        82.7676652782051 ns      8440753
+  #MemoryFixture/CacheAccess/2048/2/0/1          165.90277344671065 ns      165.63781837950896 ns      4223301
+  #MemoryFixture/CacheAccess/4096/2/0/1          332.08415463794364 ns      331.07455763248197 ns      2110971
+  #MemoryFixture/CacheAccess/8192/2/0/1            662.569068830069 ns       661.1656746088121 ns      1055382
+  #MemoryFixture/CacheAccess/16384/2/0/1         1327.7767031437843 ns       1324.268331214332 ns       528552
+  #MemoryFixture/CacheAccess/32768/2/0/1          2654.714983405558 ns      2647.9097623853727 ns       264546
+  #MemoryFixture/CacheAccess/65536/2/0/1          5304.774145979664 ns       5290.380748589911 ns       131554
+  #MemoryFixture/CacheAccess/131072/2/0/1          10631.0978039924 ns      10602.125724165107 ns        65938
+  #MemoryFixture/CacheAccess/262144/2/0/1        21258.936606489668 ns      21202.585867458216 ns        33016
+  #MemoryFixture/CacheAccess/524288/2/0/1         42460.85577331506 ns      42355.304775195626 ns        16481
+  #MemoryFixture/CacheAccess/1048576/2/0/1        85428.60153206348 ns       85160.29036964968 ns         8224
+  #MemoryFixture/CacheAccess/2097152/2/0/1       170233.91140170072 ns      169409.06511740564 ns         4131
+  #MemoryFixture/CacheAccess/4194304/2/0/1        402022.9022378908 ns      401018.78327444167 ns         1698
+  #MemoryFixture/CacheAccess/8388608/2/0/1        677677.2908216701 ns       675843.1642512042 ns         1035
+  #MemoryFixture/CacheAccess/16777216/2/0/1      1554294.1339100641 ns       1549490.831533465 ns          463
+  #MemoryFixture/CacheAccess/33554432/2/0/1       2722937.453488912 ns       2709007.093023249 ns          258
+  #MemoryFixture/CacheAccess/67108864/2/0/1      5435791.6511618495 ns       5415184.511627913 ns          129
+  #MemoryFixture/CacheAccess/64/0/2/1             5.198916380394579 ns       5.178607363536682 ns    135270162
+  #MemoryFixture/CacheAccess/128/0/2/1            10.39285189976819 ns      10.361873548918089 ns     67571996
+  #MemoryFixture/CacheAccess/256/0/2/1            20.75920269645178 ns       20.69937217558235 ns     33765810
+  #MemoryFixture/CacheAccess/512/0/2/1            41.51556112567147 ns      41.372342254073665 ns     16947585
+  #MemoryFixture/CacheAccess/1024/0/2/1           82.96516513710067 ns       82.78508396196703 ns      8459485
+  #MemoryFixture/CacheAccess/2048/0/2/1          166.19624228249646 ns      165.74873814180103 ns      4221552
+  #MemoryFixture/CacheAccess/4096/0/2/1           331.7269469760912 ns      330.91601941885546 ns      2111762
+  #MemoryFixture/CacheAccess/8192/0/2/1           663.8953077112178 ns       662.4540856828183 ns      1059419
+  #MemoryFixture/CacheAccess/16384/0/2/1          1326.843343898467 ns       1323.254749209487 ns       527193
+  #MemoryFixture/CacheAccess/32768/0/2/1         2656.6743123958327 ns      2651.2139933123217 ns       264069
+  #MemoryFixture/CacheAccess/65536/0/2/1          5316.515245822549 ns       5306.343742646622 ns       131741
+  #MemoryFixture/CacheAccess/131072/0/2/1         10623.00981164003 ns      10584.927048634307 ns        66044
+  #MemoryFixture/CacheAccess/262144/0/2/1        21210.760294289023 ns      21148.895028460767 ns        33028
+  #MemoryFixture/CacheAccess/524288/0/2/1         42522.49017237178 ns       42412.58560628969 ns        16535
+  #MemoryFixture/CacheAccess/1048576/0/2/1        86800.15632693251 ns       86501.64057114806 ns         8124
+  #MemoryFixture/CacheAccess/2097152/0/2/1       177705.58147680553 ns      177010.52665832458 ns         3995
+  #MemoryFixture/CacheAccess/4194304/0/2/1        449051.5944408481 ns        448237.065698044 ns         1583
+  #MemoryFixture/CacheAccess/8388608/0/2/1       1389931.2189924652 ns      1386035.8565891527 ns          516
+  #MemoryFixture/CacheAccess/16777216/0/2/1       4438020.074999826 ns       4420751.918750021 ns          160
+  #MemoryFixture/CacheAccess/33554432/0/2/1     1.730178560976084E7 ns    1.7182623121951293E7 ns           41
+  #MemoryFixture/CacheAccess/67108864/0/2/1     5.283456416664952E7 ns     5.258297450000053E7 ns           12
+  #MemoryFixture/CacheAccess/64/1/2/1             5.204121573005314 ns       5.179569988739665 ns    135600170
+  #MemoryFixture/CacheAccess/128/1/2/1           10.387460007442089 ns      10.354541036512236 ns     67512560
+  #MemoryFixture/CacheAccess/256/1/2/1            20.77893771735786 ns      20.727591539055314 ns     33750321
+  #MemoryFixture/CacheAccess/512/1/2/1             41.4739992379063 ns      41.315239742240664 ns     16908639
+  #MemoryFixture/CacheAccess/1024/1/2/1           82.95454097741914 ns        82.7976946763163 ns      8446970
+  #MemoryFixture/CacheAccess/2048/1/2/1          165.86154320354674 ns      165.43862697234525 ns      4233855
+  #MemoryFixture/CacheAccess/4096/1/2/1           331.8942415618145 ns      331.28362462222265 ns      2109704
+  #MemoryFixture/CacheAccess/8192/1/2/1           663.6968508366361 ns        662.640989545053 ns      1057011
+  #MemoryFixture/CacheAccess/16384/1/2/1          1328.002697434852 ns         1325.3893625606 ns       527909
+  #MemoryFixture/CacheAccess/32768/1/2/1          2656.826225607798 ns       2651.831997636693 ns       264032
+  #MemoryFixture/CacheAccess/65536/1/2/1          5313.403312198365 ns      5296.6562514184625 ns       132178
+  #MemoryFixture/CacheAccess/131072/1/2/1        10603.411688232678 ns      10580.430523642488 ns        66152
+  #MemoryFixture/CacheAccess/262144/1/2/1         21213.68814698657 ns       21160.64858647625 ns        33038
+  #MemoryFixture/CacheAccess/524288/1/2/1         42446.96972817497 ns       42358.49900102921 ns        16517
+  #MemoryFixture/CacheAccess/1048576/1/2/1        85427.89922199522 ns       85099.99477267203 ns         8226
+  #MemoryFixture/CacheAccess/2097152/1/2/1       179576.28781830988 ns      178747.97179230847 ns         4006
+  #MemoryFixture/CacheAccess/4194304/1/2/1        453971.4271099782 ns      453200.38874680526 ns         1564
+  #MemoryFixture/CacheAccess/8388608/1/2/1        1413810.749999729 ns      1409767.6830708506 ns          508
+  #MemoryFixture/CacheAccess/16777216/1/2/1       4481396.176099637 ns       4463691.635220161 ns          159
+  #MemoryFixture/CacheAccess/33554432/1/2/1    1.7363190725006916E7 ns    1.7271956449999947E7 ns           40
+  #MemoryFixture/CacheAccess/67108864/1/2/1     5.310257300000861E7 ns     5.283166808333325E7 ns           12
+  #MemoryFixture/CacheAccess/64/2/2/1             5.194585073441566 ns       5.169936491706671 ns    135797225
+  #MemoryFixture/CacheAccess/128/2/2/1           10.375776978615239 ns      10.351150907177248 ns     67504271
+  #MemoryFixture/CacheAccess/256/2/2/1            20.73537619800892 ns      20.682392672592464 ns     33819437
+  #MemoryFixture/CacheAccess/512/2/2/1            41.46680809632523 ns       41.35825233901641 ns     16913944
+  #MemoryFixture/CacheAccess/1024/2/2/1           82.84606240770246 ns        82.6134204462559 ns      8446202
+  #MemoryFixture/CacheAccess/2048/2/2/1          165.87278324509214 ns      165.32429617950757 ns      4232933
+  #MemoryFixture/CacheAccess/4096/2/2/1           331.2406082982817 ns       330.5629607515063 ns      2116922
+  #MemoryFixture/CacheAccess/8192/2/2/1            663.159315900038 ns       661.6959690484747 ns      1056103
+  #MemoryFixture/CacheAccess/16384/2/2/1         1327.5666883524236 ns      1324.3426747207684 ns       528758
+  #MemoryFixture/CacheAccess/32768/2/2/1         2654.9809699966722 ns       2648.132907418347 ns       264372
+  #MemoryFixture/CacheAccess/65536/2/2/1           5314.47981229803 ns       5303.806613499892 ns       131912
+  #MemoryFixture/CacheAccess/131072/2/2/1         10606.19082560071 ns      10585.181869071148 ns        66097
+  #MemoryFixture/CacheAccess/262144/2/2/1        21187.819721722273 ns      21154.040502117125 ns        33060
+  #MemoryFixture/CacheAccess/524288/2/2/1         42442.62465239758 ns       42311.67198645875 ns        16542
+  #MemoryFixture/CacheAccess/1048576/2/2/1        85539.82432104382 ns       85200.15385547924 ns         8248
+  #MemoryFixture/CacheAccess/2097152/2/2/1         180928.070870083 ns      180122.69382093282 ns         3965
+  #MemoryFixture/CacheAccess/4194304/2/2/1       456790.68648981495 ns       455950.1693600544 ns         1547
+  #MemoryFixture/CacheAccess/8388608/2/2/1       1427351.7287124782 ns      1423232.2613861358 ns          505
+  #MemoryFixture/CacheAccess/16777216/2/2/1       4513772.829113805 ns       4495839.088607608 ns          158
+  #MemoryFixture/CacheAccess/33554432/2/2/1    1.7454686475002743E7 ns    1.7364546800000016E7 ns           40
+  #MemoryFixture/CacheAccess/67108864/2/2/1    5.2963768833327174E7 ns     5.266439433333403E7 ns           12
+
+ */
+float result = 0;
+
+using android::media::psh_utils::CoreClass;
+using android::media::psh_utils::CORE_LITTLE;
+using android::media::psh_utils::CORE_MID;
+using android::media::psh_utils::CORE_BIG;
+
+enum Direction {
+    DIRECTION_FORWARD,
+    DIRECTION_BACKWARD,
+    DIRECTION_RANDOM,
+};
+
+std::string toString(Direction direction) {
+    switch (direction) {
+        case DIRECTION_FORWARD: return "DIRECTION_FORWARD";
+        case DIRECTION_BACKWARD: return "DIRECTION_BACKWARD";
+        case DIRECTION_RANDOM: return "DIRECTION_RANDOM";
+        default: return "DIRECTION_UNKNOWN";
+    }
+}
+
+enum Content {
+    CONTENT_ZERO,
+    CONTENT_RANDOM,
+};
+
+std::string toString(Content content) {
+    switch (content) {
+        case CONTENT_ZERO: return "CONTENT_ZERO";
+        case CONTENT_RANDOM: return "CONTENT_RANDOM";
+        default: return "CONTENT_UNKNOWN";
+    }
+}
+
+class MemoryFixture : public android::media::psh_utils::PerformanceFixture {
+public:
+    void SetUp(benchmark::State& state) override {
+        mCount = state.range(0) / (sizeof(uint32_t) + sizeof(float));
+        state.SetComplexityN(mCount * 2);  // 2 accesses per iteration: index and data.
+
+        // create src distribution
+        mSource.resize(mCount);
+        const auto content = static_cast<Content>(state.range(3));
+        if (content == CONTENT_RANDOM) {
+            std::minstd_rand gen(mCount);
+            std::uniform_real_distribution<float> dis(-1.f, 1.f);
+            for (size_t i = 0; i < mCount; i++) {
+                mSource[i] = dis(gen);
+            }
+        }
+
+        // create direction
+        mIndex.resize(mCount);
+        const auto direction = static_cast<Direction>(state.range(2));
+        switch (direction) {
+            case DIRECTION_BACKWARD:
+                for (size_t i = 0; i < mCount; i++) {
+                    mIndex[i] = mCount - 1 - i;  // traverse in reverse order
+                }
+                break;
+            case DIRECTION_FORWARD:
+            case DIRECTION_RANDOM:
+                for (size_t i = 0; i < mCount; i++) {
+                    mIndex[i] = i;  // ascending order; shuffled below for DIRECTION_RANDOM
+                }
+                if (direction == DIRECTION_RANDOM) {
+                    std::random_device rd;
+                    std::mt19937 g(rd());
+                    std::shuffle(mIndex.begin(), mIndex.end(), g);
+                }
+                break;
+        }
+
+        // set up the profiler
+        const auto coreClass = static_cast<CoreClass>(state.range(1));
+
+        // It would be best if we could override SetName() but it is too late at this point,
+        // so we set the state label here for clarity.
+        state.SetLabel(toString(coreClass).append("/")
+            .append(toString(direction)).append("/")
+            .append(toString(content)));
+
+        if (property_get_bool("persist.audio.benchmark_profile", false)) {
+            startProfiler(coreClass);
+        }
+    }
+    size_t mCount = 0;
+    std::vector<uint32_t> mIndex;
+    std::vector<float> mSource;
+};
+
+BENCHMARK_DEFINE_F(MemoryFixture, CacheAccess)(benchmark::State &state) {
+    float accum = 0;
+    while (state.KeepRunning()) {
+        for (size_t i = 0; i < mCount; ++i) {
+            accum += mSource[mIndex[i]];
+        }
+        benchmark::ClobberMemory();
+    }
+    result += accum; // not optimized
+}
+
+BENCHMARK_REGISTER_F(MemoryFixture, CacheAccess)->ArgsProduct({
+    benchmark::CreateRange(64, 64<<20, /* multi = */2),
+    {CORE_LITTLE, CORE_MID, CORE_BIG},
+    {DIRECTION_FORWARD, DIRECTION_RANDOM},
+    {CONTENT_RANDOM},
+});
+
+BENCHMARK_MAIN();
diff --git a/media/psh_utils/benchmarks/audio_powerstatscollector_benchmark.cpp b/media/psh_utils/benchmarks/audio_powerstatscollector_benchmark.cpp
new file mode 100644
index 0000000..021eb5a
--- /dev/null
+++ b/media/psh_utils/benchmarks/audio_powerstatscollector_benchmark.cpp
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "audio_token_benchmark"
+#include <utils/Log.h>
+
+#include <psh_utils/PowerStatsCollector.h>
+
+#include <benchmark/benchmark.h>
+
+/*
+ Pixel 9 Pro XL
+ (tolerance is the amount of time a cached value is valid).
+------------------------------------------------------------------------------------------
+ Benchmark                            Time                      CPU             Iteration
+------------------------------------------------------------------------------------------
+audio_powerstatscollector_benchmark:
+  #BM_StatsToleranceMs/0      6.346578290999787E7 ns            2069264.56 ns          100
+  #BM_StatsToleranceMs/50      454.12461256065177 ns     203.1644161064639 ns      2615571
+  #BM_StatsToleranceMs/100     167.74983887731364 ns    101.99598388920647 ns      5436852
+  #BM_StatsToleranceMs/200     102.57950838168422 ns     79.40969988086803 ns      7600815
+  #BM_StatsToleranceMs/500      86.87348495571898 ns     75.24841434306252 ns      9789318
+*/
+
+// We check how expensive it is to query stats depending
+// on the tolerance to reuse the cached values.
+// A tolerance of 0 means we always fetch stats.
+static void BM_StatsToleranceMs(benchmark::State& state) {
+    auto& collector = android::media::psh_utils::PowerStatsCollector::getCollector();
+    const int64_t toleranceNs = state.range(0) * 1'000'000;
+    while (state.KeepRunning()) {
+        collector.getStats(toleranceNs);
+        benchmark::ClobberMemory();
+    }
+}
+
+// Here we test various time tolerances (given in milliseconds here)
+BENCHMARK(BM_StatsToleranceMs)->Arg(0)->Arg(50)->Arg(100)->Arg(200)->Arg(500);
+
+BENCHMARK_MAIN();
diff --git a/media/psh_utils/benchmarks/audio_token_benchmark.cpp b/media/psh_utils/benchmarks/audio_token_benchmark.cpp
new file mode 100644
index 0000000..47003c0
--- /dev/null
+++ b/media/psh_utils/benchmarks/audio_token_benchmark.cpp
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "audio_token_benchmark"
+#include <utils/Log.h>
+
+#include <psh_utils/Token.h>
+
+#include <benchmark/benchmark.h>
+
+/*
+ Pixel 9 Pro XL
+------------------------------------------------------------------------------------------
+ Benchmark                            Time                      CPU             Iteration
+------------------------------------------------------------------------------------------
+audio_token_benchmark:
+  #BM_ClientToken     494.6548907301575 ns     492.4932166101717 ns      1376819
+  #BM_ThreadToken    140.34316175293938 ns    139.91778452790845 ns      5000397
+  #BM_TrackToken      944.0571625384163 ns     893.7912613357879 ns       643096
+*/
+
+static void BM_ClientToken(benchmark::State& state) {
+    constexpr pid_t kPid = 10;
+    constexpr uid_t kUid = 100;
+    while (state.KeepRunning()) {
+        auto token = android::media::psh_utils::createAudioClientToken(
+                kPid, kUid);
+        benchmark::ClobberMemory();
+    }
+}
+
+BENCHMARK(BM_ClientToken);
+
+static void BM_ThreadToken(benchmark::State& state) {
+    constexpr pid_t kTid = 20;
+    constexpr const char* kWakeLockTag = "thread";
+    while (state.KeepRunning()) {
+        auto token = android::media::psh_utils::createAudioThreadToken(
+                kTid, kWakeLockTag);
+        benchmark::ClobberMemory();
+    }
+}
+
+BENCHMARK(BM_ThreadToken);
+
+static void BM_TrackToken(benchmark::State& state) {
+    constexpr pid_t kPid = 10;
+    constexpr uid_t kUid = 100;
+    auto clientToken = android::media::psh_utils::createAudioClientToken(
+                kPid, kUid);
+    while (state.KeepRunning()) {
+        auto token = android::media::psh_utils::createAudioTrackToken(kUid);
+        benchmark::ClobberMemory();
+    }
+}
+
+BENCHMARK(BM_TrackToken);
+
+BENCHMARK_MAIN();
diff --git a/media/psh_utils/include/psh_utils/AudioPowerManager.h b/media/psh_utils/include/psh_utils/AudioPowerManager.h
new file mode 100644
index 0000000..47dfdb2
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/AudioPowerManager.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "PowerClientStats.h"
+#include "PowerStatsCollector.h"
+#include "Token.h"
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/linked_hash_map.h>
+#include <list>
+#include <map>
+#include <unordered_map>
+#include <unordered_set>
+
+namespace android::media::psh_utils {
+
+/**
+ * AudioPowerManager is a singleton class that serializes the power, wakelock,
+ * and performance messages for audio clients, threads, and tracks.
+ */
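+// Illustrative usage sketch (pid/uid come from the caller). Tokens are RAII handles:
+// accounting for the client, thread, or track ends when the returned token is destroyed.
+//
+//   auto& apm = AudioPowerManager::getAudioPowerManager();
+//   std::unique_ptr<Token> client = apm.startClient(pid, uid, "client info" /* additional */);
+//   ...
+//   client.reset();  // ends the client accounting interval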
+class AudioPowerManager {
+    friend class AudioClientToken;
+    friend class AudioThreadToken;
+    friend class AudioTrackToken;
+
+public:
+    static AudioPowerManager& getAudioPowerManager();
+
+    /**
+     * Returns a token indicating that a client is started.
+     * This is associated with an application.
+     */
+    std::unique_ptr<Token> startClient(pid_t pid, uid_t uid,
+            const std::string& additional);
+
+    /**
+     * Returns a token that represents a start instance for uid.
+     * This is typically associated with an AudioTrack / AudioRecord start.
+     */
+    std::unique_ptr<Token> startTrack(uid_t uid, const std::string& additional);
+
+    /**
+     * Returns a token that represents a wakelock for a Thread start.
+     */
+    std::unique_ptr<Token> startThread(
+            pid_t pid, const std::string& wakeLockName,
+            WakeFlag wakeFlag, const std::string& additional);
+
+    std::string toString() const;
+
+    static bool enabled();
+
+private:
+    // For AudioToken dtor only.
+    void clear_token_ptr(Token* token);
+    void stopClient(pid_t pid);
+
+    static constexpr size_t kHistory = 6;
+
+    mutable std::mutex mMutex;
+    std::unordered_set<Token *> mOutstandingTokens GUARDED_BY(mMutex);
+    std::unordered_map<pid_t, uid_t> mPidToUid GUARDED_BY(mMutex);
+    std::map<uid_t, std::shared_ptr<PowerClientStats>> mPowerClientStats GUARDED_BY(mMutex);
+    audio_utils::linked_hash_map<uid_t, std::shared_ptr<PowerClientStats>>
+            mHistoricalClients GUARDED_BY(mMutex);
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/HealthStats.h b/media/psh_utils/include/psh_utils/HealthStats.h
new file mode 100644
index 0000000..d7a8d1a
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/HealthStats.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+
+namespace android::media::psh_utils {
+
+// From hardware/interfaces/health/aidl/android/hardware/health/HealthInfo.aidl
+
+struct HealthStats {
+    /**
+     * Instantaneous battery voltage in millivolts (mV).
+     *
+     * Historically, the unit of this field was microvolts (µV), but all
+     * clients and implementations use millivolts in practice, making that
+     * the de facto standard.
+     */
+    double batteryVoltageMillivolts;
+    /**
+     * Battery charge value when it is considered to be "full" in µA-h
+     */
+    double batteryFullChargeUah;
+    /**
+     * Instantaneous battery capacity in µA-h
+     */
+    double batteryChargeCounterUah;
+
+    std::string normalizedEnergy(double time) const;
+
+    bool isValid() const { return batteryVoltageMillivolts > 0; }
+
+    // Returns {seconds, joules, watts} from battery counters
+    std::tuple<float, float, float> energyFrom(const std::string& s) const;
+    std::string toString() const;
+
+    HealthStats operator+=(const HealthStats& other);
+    HealthStats operator-=(const HealthStats& other);
+    HealthStats operator+(const HealthStats& other) const;
+    HealthStats operator-(const HealthStats& other) const;
+    bool operator==(const HealthStats& other) const = default;
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/PerformanceFixture.h b/media/psh_utils/include/psh_utils/PerformanceFixture.h
new file mode 100644
index 0000000..092a508
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PerformanceFixture.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <audio_utils/threads.h>
+#include <benchmark/benchmark.h>
+#include <psh_utils/PowerStats.h>
+#include <psh_utils/PowerStatsCollector.h>
+
+#include <future>
+
+namespace android::media::psh_utils {
+
+enum CoreClass {
+    CORE_LITTLE,
+    CORE_MID,
+    CORE_BIG,
+};
+
+inline std::string toString(CoreClass coreClass) {
+    switch (coreClass) {
+        case CORE_LITTLE: return "LITTLE";
+        case CORE_MID: return "MID";
+        case CORE_BIG: return "BIG";
+        default: return "UNKNOWN";
+    }
+}
+
+/**
+ * A benchmark fixture is used to specify benchmarks that have a custom SetUp() and
+ * TearDown().  This is **required** for performance testing, as a typical benchmark
+ * method **may be called several times** during a run.
+ *
+ * A fixture ensures that SetUp(), TearDown(), and the resulting statistics accumulation
+ * are done only once.  Note: BENCHMARK(BM_func)->Setup(DoSetup)->Teardown(DoTeardown)
+ * does something similar, but it requires some singleton to contain the state properly.
+ */
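+// Minimal usage sketch (see audio_powerstats_benchmark.cpp for a complete example);
+// MyFixture and Run are placeholder names:
+//
+//   class MyFixture : public PerformanceFixture {
+//   public:
+//       void SetUp(benchmark::State& state) override {
+//           startProfiler(CORE_BIG);  // take the starting power snapshot and pin to a big core
+//       }
+//   };
+//   BENCHMARK_DEFINE_F(MyFixture, Run)(benchmark::State& state) {
+//       while (state.KeepRunning()) { /* workload under test */ }
+//   }
+//   BENCHMARK_REGISTER_F(MyFixture, Run);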
+class PerformanceFixture : public benchmark::Fixture {
+public:
+    // call this to start the profiling
+    virtual void startProfiler(CoreClass coreClass) {
+        mCores = android::audio_utils::get_number_cpus();
+        if (mCores == 0) return;
+        mCoreClass = coreClass;
+        std::array<unsigned, 3> coreSelection{0U, mCores / 2 + 1, mCores - 1};
+        mCore = coreSelection[std::min((size_t)coreClass, std::size(coreSelection) - 1)];
+
+        auto& collector = android::media::psh_utils::PowerStatsCollector::getCollector();
+        mStartStats = collector.getStats();
+
+        [[maybe_unused]] const pid_t tid = gettid(); // our own thread id.
+
+        // Possibly change priority to improve benchmarking
+        // android::audio_utils::set_thread_priority(gettid(), 98);
+
+        android::audio_utils::set_thread_affinity(0 /* pid */, 1 << mCore);
+    }
+
+    void TearDown(benchmark::State &state) override {
+        const auto N = state.complexity_length_n();
+        state.counters["N"] = benchmark::Counter(N,
+                benchmark::Counter::kIsIterationInvariantRate, benchmark::Counter::OneK::kIs1024);
+        if (mStartStats) {
+            auto& collector = android::media::psh_utils::PowerStatsCollector::getCollector();
+            const auto stopStats = collector.getStats();
+            android::media::psh_utils::PowerStats diff = *stopStats - *mStartStats;
+            auto cpuEnergy = diff.energyFrom("CPU");
+            auto memEnergy = diff.energyFrom("MEM");
+
+            // energyFrom() returns {seconds, joules, watts}; report the average power in watts.
+            state.counters["WCPU"] = benchmark::Counter(std::get<2>(cpuEnergy),
+                                                          benchmark::Counter::kDefaults,
+                                                          benchmark::Counter::OneK::kIs1000);
+            state.counters["WMem"] = benchmark::Counter(std::get<2>(memEnergy),
+                                                          benchmark::Counter::kDefaults,
+                                                          benchmark::Counter::OneK::kIs1000);
+            state.counters["JCPU"] = benchmark::Counter(
+                    std::get<1>(cpuEnergy) / N / state.iterations(), benchmark::Counter::kDefaults,
+                    benchmark::Counter::OneK::kIs1000);
+            state.counters["JMem"] = benchmark::Counter(
+                    std::get<1>(memEnergy) / N / state.iterations(), benchmark::Counter::kDefaults,
+                    benchmark::Counter::OneK::kIs1000);
+        }
+    }
+
+protected:
+    // these are only initialized upon startProfiler.
+    unsigned mCores = 0;
+    int mCore = 0;
+    CoreClass mCoreClass = CORE_LITTLE;
+    std::shared_ptr<const android::media::psh_utils::PowerStats> mStartStats;
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/PowerClientStats.h b/media/psh_utils/include/psh_utils/PowerClientStats.h
new file mode 100644
index 0000000..6e27e41
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PowerClientStats.h
@@ -0,0 +1,98 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "PowerStats.h"
+#include "PowerStatsCollector.h"
+
+#include <android-base/thread_annotations.h>
+#include <audio_utils/CommandThread.h>
+#include <memory>
+#include <set>
+
+namespace android::media::psh_utils {
+
+/**
+ * PowerClientStats accumulates power measurements based on start and stop events.
+ *
+ * The start and stop events must eventually be matched, but several start events
+ * in a row result in the power being counted only once.
+ */
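+//
+// Illustrative usage (sketch; the caller code and timestamps below are assumptions):
+//
+//   auto clientStats = std::make_shared<PowerClientStats>(uid, "example client");
+//   clientStats->addPid(pid);
+//   clientStats->start(systemTime(SYSTEM_TIME_BOOTTIME));  // begin attribution
+//   // ... client active ...
+//   clientStats->stop(systemTime(SYSTEM_TIME_BOOTTIME));   // accumulate the delta
+//   ALOGD("%s", clientStats->toString(true /* stats */).c_str());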
+class PowerClientStats {
+public:
+    // A command thread is used for tokens to dispatch start and stop sequentially
+    // with less overhead to the caller.
+    static audio_utils::CommandThread& getCommandThread();
+
+    /**
+     * Creates a UID-based power stat tracker.
+     *
+     * @param uid uid of app
+     * @param additional string to be printed out.
+     */
+    PowerClientStats(uid_t uid, const std::string& additional);
+
+    /**
+     * Starts power tracking.
+     */
+    void start(int64_t actualNs) EXCLUDES(mMutex);
+
+    /**
+     * Stops power tracking (saves the difference) - must be paired with start().
+     */
+    void stop(int64_t actualNs) EXCLUDES(mMutex);
+
+    /**
+     * Adds a pid to the App for string printing.
+     */
+    void addPid(pid_t pid) EXCLUDES(mMutex);
+
+    /**
+     * Removes the pid from the App for string printing.
+     */
+    size_t removePid(pid_t pid) EXCLUDES(mMutex);
+
+    /**
+     * Returns the string info.
+     * @param stats if true returns the stats.
+     * @return stat string.
+     */
+    std::string toString(bool stats = false, const std::string& prefix = {})
+            const EXCLUDES(mMutex);
+
+private:
+    // Snapshots are taken no more often than 500ms.
+    static constexpr int64_t kStatTimeToleranceNs = 500'000'000;
+
+    mutable std::mutex mMutex;
+    const uid_t mUid;
+    const std::string mName;
+    const std::string mAdditional;
+    std::set<pid_t> mPids GUARDED_BY(mMutex); // pids sharing same uid
+    int64_t mTokenCount GUARDED_BY(mMutex) = 0;
+    int64_t mStartNs GUARDED_BY(mMutex) = 0;
+    std::shared_ptr<const PowerStats> mStartStats GUARDED_BY(mMutex);
+
+    // Cumulative time while active: sum of deltas of (stop - start).
+    int64_t mCumulativeNs GUARDED_BY(mMutex) = 0;
+    // Cumulative stats while active: sum of deltas of (stop - start),
+    // where snapshots are quantized to ~500ms accuracy.
+    std::shared_ptr<PowerStats> mCumulativeStats GUARDED_BY(mMutex) =
+            std::make_shared<PowerStats>();
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/PowerStats.h b/media/psh_utils/include/psh_utils/PowerStats.h
new file mode 100644
index 0000000..ae48606
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PowerStats.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "HealthStats.h"
+
+#include <string>
+#include <unordered_map>
+#include <vector>
+
+namespace android::media::psh_utils {
+
+// See powerstats_util.proto and powerstats_util.pb.h
+
+struct PowerStats {
+    struct Metadata {
+        // Represents the start time measured in milliseconds since boot of the
+        // interval or point in time when stats were gathered.
+        uint64_t start_time_since_boot_ms;
+
+        // Represents the start time measured in milliseconds since epoch of the
+        // interval or point in time when stats were gathered.
+        uint64_t start_time_epoch_ms;
+
+        // In monotonic clock.
+        uint64_t start_time_monotonic_ms;
+
+        // If PowerStats represents an interval, the duration field will be set to
+        // the millisecond duration of stats collection. It will be unset for point
+        // stats.
+        // This is in boottime.
+        uint64_t duration_ms;
+
+        // This is in monotonic time, and does not include suspend.
+        uint64_t duration_monotonic_ms;
+
+        std::string toString() const;
+
+        Metadata operator+=(const Metadata& other);
+        Metadata operator-=(const Metadata& other);
+        Metadata operator+(const Metadata& other) const;
+        Metadata operator-(const Metadata& other) const;
+        bool operator==(const Metadata& other) const = default;
+    };
+
+    struct StateResidency {
+        std::string entity_name;
+        std::string state_name;
+        uint64_t time_ms;
+        uint64_t entry_count;
+
+        std::string toString() const;
+
+        StateResidency operator+=(const StateResidency& other);
+        StateResidency operator-=(const StateResidency& other);
+        StateResidency operator+(const StateResidency& other) const;
+        StateResidency operator-(const StateResidency& other) const;
+        bool operator==(const StateResidency& other) const = default;
+    };
+
+    struct RailEnergy {
+        std::string subsystem_name;
+        std::string rail_name;
+        uint64_t energy_uws;
+
+        std::string toString() const;
+        RailEnergy operator+=(const RailEnergy& other);
+        RailEnergy operator-=(const RailEnergy& other);
+        RailEnergy operator+(const RailEnergy& other) const;
+        RailEnergy operator-(const RailEnergy& other) const;
+        bool operator==(const RailEnergy& other) const = default;
+    };
+
+    HealthStats health_stats;
+
+    std::string normalizedEnergy(const std::string& prefix = {}) const;
+
+    // Returns {seconds, joules, watts} from all rails containing a matching string.
+    std::tuple<float, float, float> energyFrom(const std::string& railMatcher) const;
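+    // Illustrative use (assumed caller), mirroring the benchmark harness:
+    //   const auto [seconds, joules, watts] = stats.energyFrom("CPU");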
+    std::string toString(const std::string& prefix = {}) const;
+
+    PowerStats operator+=(const PowerStats& other);
+    PowerStats operator-=(const PowerStats& other);
+    PowerStats operator+(const PowerStats& other) const;
+    PowerStats operator-(const PowerStats& other) const;
+    bool operator==(const PowerStats& other) const = default;
+
+    Metadata metadata{};
+    // These are sorted by name.
+    std::vector<StateResidency> power_entity_state_residency;
+    std::vector<RailEnergy> rail_energy;
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/PowerStatsCollector.h b/media/psh_utils/include/psh_utils/PowerStatsCollector.h
new file mode 100644
index 0000000..e3f8ea8
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PowerStatsCollector.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "PowerStats.h"
+#include <android-base/thread_annotations.h>
+#include <memory>
+#include <utils/Errors.h> // status_t
+
+namespace android::media::psh_utils {
+
+// Internal providers that fill up the PowerStats state object.
+class PowerStatsProvider {
+public:
+    virtual ~PowerStatsProvider() = default;
+    virtual status_t fill(PowerStats* stat) const = 0;
+};
+
+class PowerStatsCollector {
+public:
+    // singleton getter
+    static PowerStatsCollector& getCollector();
+
+    // Returns a snapshot of the state.
+    // If toleranceNs > 0, we permit the use of a stale snapshot taken within that tolerance.
+    std::shared_ptr<const PowerStats> getStats(int64_t toleranceNs = 0)
+            EXCLUDES(mMutex, mMutexExclusiveFill);
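+    //
+    // Illustrative usage (sketch; the tolerance value below is an assumption):
+    //   auto& collector = PowerStatsCollector::getCollector();
+    //   const auto before = collector.getStats();
+    //   // ... workload ...
+    //   const auto after = collector.getStats(50'000'000 /* 50 ms tolerance */);
+    //   const PowerStats diff = *after - *before;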
+
+private:
+    PowerStatsCollector();  // use the singleton getter
+
+    // Returns the last stats snapshot if it was taken within toleranceNs, or nullptr otherwise.
+    std::shared_ptr<const PowerStats> checkLastStats(int64_t toleranceNs) const EXCLUDES(mMutex);
+    int fill(PowerStats* stats) const;
+    void addProvider(std::unique_ptr<PowerStatsProvider>&& powerStatsProvider);
+
+    mutable std::mutex mMutexExclusiveFill;
+    mutable std::mutex mMutex;
+    // addProvider is called in the ctor, so effectively const.
+    std::vector<std::unique_ptr<PowerStatsProvider>> mPowerStatsProviders;
+    int64_t mLastFetchNs GUARDED_BY(mMutex) = 0;
+    std::shared_ptr<const PowerStats> mLastFetchStats GUARDED_BY(mMutex);
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/ServiceSingleton.h b/media/psh_utils/include/psh_utils/ServiceSingleton.h
new file mode 100644
index 0000000..d0cd6d2
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/ServiceSingleton.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_manager.h>
+#include <android-base/thread_annotations.h>
+#include <mutex>
+#include <utils/Log.h>
+#include <utils/Timers.h>
+
+namespace android::media::psh_utils {
+
+struct DefaultServiceTraits {
+    static constexpr int64_t kThresholdRetryNs = 1'000'000'000;
+    static constexpr int64_t kMaxRetries = 5;
+    static constexpr const char* kServiceVersion = "/default";
+    static constexpr bool kShowLog = true;
+};
+
+template<typename Service, typename ServiceTraits = DefaultServiceTraits>
+std::shared_ptr<Service> getServiceSingleton() {
+    [[clang::no_destroy]] static constinit std::mutex m;
+    [[clang::no_destroy]] static constinit std::shared_ptr<Service> service GUARDED_BY(m);
+    static int64_t nextTryNs GUARDED_BY(m) = 0;
+    static int64_t tries GUARDED_BY(m) = 0;
+
+    std::lock_guard l(m);
+    if (service
+            || tries > ServiceTraits::kMaxRetries  // too many attempts already
+            || systemTime(SYSTEM_TIME_BOOTTIME) < nextTryNs) {  // too soon to retry.
+        return service;
+    }
+
+    const auto serviceName = std::string(Service::descriptor)
+            .append(ServiceTraits::kServiceVersion);
+    service = Service::fromBinder(
+            ::ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
+
+    if (!service) {
+        // If failed, set a time limit before retry.
+        // No need to log an error, it is already done.
+        nextTryNs = systemTime(SYSTEM_TIME_BOOTTIME) + ServiceTraits::kThresholdRetryNs;
+        ALOGV_IF(ServiceTraits::kShowLog, "service:%s  retries:%lld of %lld  nextTryNs:%lld",
+                Service::descriptor, (long long)tries,
+                (long long)ServiceTraits::kMaxRetries, (long long)nextTryNs);
+        ++tries;
+    }
+
+    return service;
+}
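+
+// Illustrative usage (sketch): any NDK-backend AIDL interface class exposing a static
+// 'descriptor' and 'fromBinder' can be used here; the client below is an assumption.
+//
+//   using ::aidl::android::hardware::power::stats::IPowerStats;
+//   auto powerStats = getServiceSingleton<IPowerStats>();  // nullptr until available
+//   if (powerStats) { /* query the HAL */ }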
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/Token.h b/media/psh_utils/include/psh_utils/Token.h
new file mode 100644
index 0000000..2b52d11
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/Token.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <memory>
+#include <string>
+
+namespace android::media::psh_utils {
+
+class Token {
+public:
+    virtual ~Token() = default;
+    virtual std::string toString() const = 0;
+};
+
+// Client tokens (one per Audio Client PID)
+std::unique_ptr<Token> createAudioClientToken(pid_t pid, uid_t uid,
+        const std::string& additional = {});
+
+enum class WakeFlag {
+    kNone = 0,
+    kLowLatency = 1,
+    kLowPower = 2,
+};
+
+inline std::string toString(WakeFlag wakeFlag) {
+    std::string result;
+    for (const auto& [flag, name] : std::initializer_list<std::pair<WakeFlag, std::string>> {
+            {WakeFlag::kLowLatency, "kLowLatency"},
+            {WakeFlag::kLowPower, "kLowPower"},
+        }) {
+        if (static_cast<int>(flag) & static_cast<int>(wakeFlag)) {
+            if (!result.empty()) result.append("|");
+            result.append(name);
+        }
+    }
+    return result;
+}
+
+// Thread tokens (one per ThreadBase PID started).
+std::unique_ptr<Token> createAudioThreadToken(
+        pid_t pid, const std::string& wakeLockName,
+        WakeFlag wakeFlag = WakeFlag::kNone, const std::string& additional = {});
+
+// AudioTrack/AudioRecord tokens.
+std::unique_ptr<Token> createAudioTrackToken(uid_t uid, const std::string& additional = {});
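+
+// Illustrative usage (sketch; the RAII lifetime semantics described here are an
+// assumption based on this header):
+//
+//   std::unique_ptr<Token> token = createAudioTrackToken(uid);
+//   // ... track active: power attributed while the token is alive ...
+//   token.reset();  // end of the attribution interval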
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/tests/Android.bp b/media/psh_utils/tests/Android.bp
new file mode 100644
index 0000000..74589f8
--- /dev/null
+++ b/media/psh_utils/tests/Android.bp
@@ -0,0 +1,26 @@
+package {
+    default_team: "trendy_team_media_framework_audio",
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "powerstats_collector_tests",
+    srcs: [
+        "powerstats_collector_tests.cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    static_libs: [
+        "libpshutils",
+    ],
+}
diff --git a/media/psh_utils/tests/powerstats_collector_tests.cpp b/media/psh_utils/tests/powerstats_collector_tests.cpp
new file mode 100644
index 0000000..35c264a
--- /dev/null
+++ b/media/psh_utils/tests/powerstats_collector_tests.cpp
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <psh_utils/PowerStatsCollector.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::media::psh_utils;
+
+template <typename T>
+void inRange(const T& a, const T& b, const T& c) {
+    ASSERT_GE(a, std::min(b, c));
+    ASSERT_LE(a, std::max(b, c));
+}
+
+TEST(powerstat_collector_tests, basic) {
+    auto& psc = PowerStatsCollector::getCollector();
+
+    // This test is used for debugging the string through logcat; we only validate that it is non-empty.
+    auto powerStats = psc.getStats();
+    ALOGD("%s: %s", __func__, powerStats->toString().c_str());
+    EXPECT_FALSE(powerStats->toString().empty());
+}
+
+TEST(powerstat_collector_tests, metadata) {
+    PowerStats ps1, ps2;
+
+    constexpr uint64_t kDurationMs1 = 5;
+    constexpr uint64_t kDurationMs2 = 10;
+    ps1.metadata.duration_ms = kDurationMs1;
+    ps2.metadata.duration_ms = kDurationMs2;
+
+    constexpr uint64_t kDurationMonotonicMs1 = 3;
+    constexpr uint64_t kDurationMonotonicMs2 = 9;
+    ps1.metadata.duration_monotonic_ms = kDurationMonotonicMs1;
+    ps2.metadata.duration_monotonic_ms = kDurationMonotonicMs2;
+
+    constexpr uint64_t kStartTimeSinceBootMs1 = 1616;
+    constexpr uint64_t kStartTimeEpochMs1 = 1121;
+    constexpr uint64_t kStartTimeMonotonicMs1 = 1525;
+    constexpr uint64_t kStartTimeSinceBootMs2 = 2616;
+    constexpr uint64_t kStartTimeEpochMs2 = 2121;
+    constexpr uint64_t kStartTimeMonotonicMs2 = 2525;
+
+    ps1.metadata.start_time_since_boot_ms = kStartTimeSinceBootMs1;
+    ps1.metadata.start_time_epoch_ms = kStartTimeEpochMs1;
+    ps1.metadata.start_time_monotonic_ms = kStartTimeMonotonicMs1;
+    ps2.metadata.start_time_since_boot_ms = kStartTimeSinceBootMs2;
+    ps2.metadata.start_time_epoch_ms = kStartTimeEpochMs2;
+    ps2.metadata.start_time_monotonic_ms = kStartTimeMonotonicMs2;
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kDurationMs1 + kDurationMs2,
+            ps3.metadata.duration_ms);
+    EXPECT_EQ(kDurationMonotonicMs1 + kDurationMonotonicMs2,
+            ps3.metadata.duration_monotonic_ms);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.metadata.start_time_since_boot_ms,
+            kStartTimeSinceBootMs1, kStartTimeSinceBootMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.metadata.start_time_epoch_ms,
+            kStartTimeEpochMs1, kStartTimeEpochMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.metadata.start_time_monotonic_ms,
+            kStartTimeMonotonicMs1, kStartTimeMonotonicMs2));
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kDurationMs2 - kDurationMs1,
+            ps5.metadata.duration_ms);
+    EXPECT_EQ(kDurationMonotonicMs2 - kDurationMonotonicMs1,
+            ps5.metadata.duration_monotonic_ms);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.metadata.start_time_since_boot_ms,
+            kStartTimeSinceBootMs1, kStartTimeSinceBootMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.metadata.start_time_epoch_ms,
+            kStartTimeEpochMs1, kStartTimeEpochMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.metadata.start_time_monotonic_ms,
+            kStartTimeMonotonicMs1, kStartTimeMonotonicMs2));
+}
+
+TEST(powerstat_collector_tests, state_residency) {
+    PowerStats ps1, ps2;
+
+    constexpr uint64_t kTimeMs1 = 5;
+    constexpr uint64_t kTimeMs2 = 10;
+    constexpr uint64_t kEntryCount1 = 15;
+    constexpr uint64_t kEntryCount2 = 18;
+
+    ps1.power_entity_state_residency.emplace_back(
+            PowerStats::StateResidency{"", "", kTimeMs1, kEntryCount1});
+    ps2.power_entity_state_residency.emplace_back(
+            PowerStats::StateResidency{"", "", kTimeMs2, kEntryCount2});
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kTimeMs1 + kTimeMs2,
+            ps3.power_entity_state_residency[0].time_ms);
+    EXPECT_EQ(kEntryCount1 + kEntryCount2,
+            ps3.power_entity_state_residency[0].entry_count);
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kTimeMs2 - kTimeMs1,
+            ps5.power_entity_state_residency[0].time_ms);
+    EXPECT_EQ(kEntryCount2 - kEntryCount1,
+            ps5.power_entity_state_residency[0].entry_count);
+}
+
+TEST(powerstat_collector_tests, rail_energy) {
+    PowerStats ps1, ps2;
+
+    constexpr uint64_t kEnergyUws1 = 5;
+    constexpr uint64_t kEnergyUws2 = 10;
+
+    ps1.rail_energy.emplace_back(
+            PowerStats::RailEnergy{"", "", kEnergyUws1});
+    ps2.rail_energy.emplace_back(
+            PowerStats::RailEnergy{"", "", kEnergyUws2});
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kEnergyUws1 + kEnergyUws2,
+            ps3.rail_energy[0].energy_uws);
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kEnergyUws2 - kEnergyUws1,
+            ps5.rail_energy[0].energy_uws);
+}
+
+TEST(powerstat_collector_tests, health_stats) {
+    PowerStats ps1, ps2;
+
+    constexpr double kBatteryChargeCounterUah1 = 21;
+    constexpr double kBatteryChargeCounterUah2 = 25;
+    ps1.health_stats.batteryChargeCounterUah = kBatteryChargeCounterUah1;
+    ps2.health_stats.batteryChargeCounterUah = kBatteryChargeCounterUah2;
+
+    constexpr double kBatteryFullChargeUah1 = 32;
+    constexpr double kBatteryFullChargeUah2 = 33;
+    ps1.health_stats.batteryFullChargeUah = kBatteryFullChargeUah1;
+    ps2.health_stats.batteryFullChargeUah = kBatteryFullChargeUah2;
+
+    constexpr double kBatteryVoltageMillivolts1 = 42;
+    constexpr double kBatteryVoltageMillivolts2 = 43;
+    ps1.health_stats.batteryVoltageMillivolts = kBatteryVoltageMillivolts1;
+    ps2.health_stats.batteryVoltageMillivolts = kBatteryVoltageMillivolts2;
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kBatteryChargeCounterUah1 + kBatteryChargeCounterUah2,
+              ps3.health_stats.batteryChargeCounterUah);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.health_stats.batteryFullChargeUah,
+             kBatteryFullChargeUah1, kBatteryFullChargeUah2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.health_stats.batteryVoltageMillivolts,
+             kBatteryVoltageMillivolts1, kBatteryVoltageMillivolts2));
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kBatteryChargeCounterUah2 - kBatteryChargeCounterUah1,
+              ps5.health_stats.batteryChargeCounterUah);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.health_stats.batteryFullChargeUah,
+            kBatteryFullChargeUah1, kBatteryFullChargeUah2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.health_stats.batteryVoltageMillivolts,
+            kBatteryVoltageMillivolts1, kBatteryVoltageMillivolts2));
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/Android.bp b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
index d41a7f9..8f9ee86 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/Android.bp
+++ b/media/tests/benchmark/MediaBenchmarkTest/Android.bp
@@ -36,8 +36,8 @@
     resource_dirs: ["res"],
 
     libs: [
-        "android.test.runner",
-        "android.test.base",
+        "android.test.runner.stubs.system",
+        "android.test.base.stubs.system",
     ],
 
     jni_libs: [
@@ -69,6 +69,6 @@
 java_defaults {
     name: "MediaBenchmark-defaults",
 
-    min_sdk_version: "29",
-    target_sdk_version: "30",
+    min_sdk_version: "35",
+    target_sdk_version: "35",
 }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml b/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml
index 28c2654..bc0c16f 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml
+++ b/media/tests/benchmark/MediaBenchmarkTest/AndroidManifest.xml
@@ -26,7 +26,7 @@
         tools:ignore="AllowBackup,GoogleAppIndexingWarning,MissingApplicationIcon"
         tools:remove="android:appComponentFactory">
     </application>
-    <uses-sdk android:minSdkVersion="29" android:targetSdkVersion="31"/>
+    <uses-sdk android:minSdkVersion="35" android:targetSdkVersion="35"/>
     <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
             android:targetPackage="com.android.media.benchmark"
             android:label="Benchmark Media Test"/>
diff --git a/media/tests/benchmark/MediaBenchmarkTest/build.gradle b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
index a2af701..87fc24c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/build.gradle
+++ b/media/tests/benchmark/MediaBenchmarkTest/build.gradle
@@ -27,11 +27,11 @@
 apply plugin: 'com.android.application'
 
 android {
-    compileSdkVersion 30
+    compileSdkVersion 35
     defaultConfig {
         applicationId "com.android.media.benchmark"
         minSdkVersion 29
-        targetSdkVersion 30
+        targetSdkVersion 35
         versionCode 1
         versionName "1.0"
         testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
index afd70a3..c68a990 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/DecoderTest.java
@@ -46,6 +46,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.List;
 
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertEquals;
@@ -118,7 +119,7 @@
     }
 
     @Test(timeout = PER_TEST_TIMEOUT_MS)
-    public void testDecoder() throws IOException {
+    public void testDecoder() throws IOException, InterruptedException {
         File inputFile = new File(mInputFilePath + mInputFile);
         assertTrue("Cannot find " + mInputFile + " in directory " + mInputFilePath,
                 inputFile.exists());
@@ -133,7 +134,7 @@
             extractor.selectExtractorTrack(currentTrack);
             MediaFormat format = extractor.getFormat(currentTrack);
             String mime = format.getString(MediaFormat.KEY_MIME);
-            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, false);
+            List<String> mediaCodecs = CodecUtils.selectCodecs(mime, false);
             assertTrue("No suitable codecs found for file: " + mInputFile + " track : " +
                     currentTrack + " mime: " + mime, (mediaCodecs.size() > 0));
 
@@ -205,7 +206,7 @@
             extractor.selectExtractorTrack(currentTrack);
             MediaFormat format = extractor.getFormat(currentTrack);
             String mime = format.getString(MediaFormat.KEY_MIME);
-            ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mime, false);
+            List<String> mediaCodecs = CodecUtils.selectCodecs(mime, false);
             for (String codecName : mediaCodecs) {
                 Log.i("Test: %s\n", mInputFile);
                 Native nativeDecoder = new Native();
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
index 4202732..4ce5214 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/androidTest/java/com/android/media/benchmark/tests/EncoderTest.java
@@ -50,6 +50,7 @@
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.List;
 
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.assertEquals;
@@ -150,7 +151,7 @@
     }
 
     @BeforeClass
-    public static void prepareInput() throws IOException {
+    public static void prepareInput() throws IOException, InterruptedException {
 
         mDecodedFileFullHd = new File(mFileDirPath + DECODE_FULLHD_UNPACKED);
         int status = decodeFile(mInputFilePath + DECODE_FULLHD_INPUT, mDecodedFileFullHd);
@@ -165,7 +166,8 @@
         assertEquals("Decoder returned error " + status, 0, status);
     }
 
-    private static int decodeFile(String inputFileName, File outputDecodeFile) throws IOException {
+    private static int decodeFile(String inputFileName, File outputDecodeFile)
+            throws IOException, InterruptedException {
         int status = -1;
         File inputFile = new File(inputFileName);
         assertTrue("Cannot open input file " + inputFileName, inputFile.exists());
@@ -220,7 +222,7 @@
         int status;
         int frameSize;
 
-        ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mMime, true);
+        List<String> mediaCodecs = CodecUtils.selectCodecs(mMime, true);
         assertTrue("No suitable codecs found for mimetype: " + mMime, (mediaCodecs.size() > 0));
         Boolean[] encodeMode = {true, false};
         // Encoding the decoded input file
@@ -297,7 +299,7 @@
 
     @Test(timeout = PER_TEST_TIMEOUT_MS)
     public void testNativeEncoder() {
-        ArrayList<String> mediaCodecs = CodecUtils.selectCodecs(mMime, true);
+        List<String> mediaCodecs = CodecUtils.selectCodecs(mMime, true);
         assertTrue("No suitable codecs found for mimetype: " + mMime, (mediaCodecs.size() > 0));
         for (String codecName : mediaCodecs) {
             Native nativeEncoder = new Native();
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
index 1e10b37..f223242 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/CodecUtils.java
@@ -5,6 +5,7 @@
 import android.media.MediaFormat;
 import android.os.Build;
 import java.util.ArrayList;
+import java.util.List;
 
 public class CodecUtils {
     private CodecUtils() {}
@@ -15,7 +16,7 @@
      * @param isEncoder Specifies encoder or decoder
      * @return ArrayList of codec names
      */
-    public static ArrayList<String> selectCodecs(String mimeType, boolean isEncoder) {
+    public static List<String> selectCodecs(String mimeType, boolean isEncoder) {
         MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
         MediaCodecInfo[] codecInfos = codecList.getCodecInfos();
         ArrayList<String> supportedCodecs = new ArrayList<>();
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
index e947ef6..e9b337d 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Decoder.java
@@ -28,7 +28,10 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
 import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
 
 import com.android.media.benchmark.library.IBufferXfer;
 
@@ -37,28 +40,28 @@
     private static final boolean DEBUG = false;
     private static final int kQueueDequeueTimeoutUs = 1000;
 
-    private final Object mLock = new Object();
-    private MediaCodec mCodec;
-    private Surface mSurface = null;
-    private boolean mRender = false;
-    private ArrayList<BufferInfo> mInputBufferInfo;
-    private Stats mStats;
-    private String mMime;
+    protected final Object mLock = new Object();
+    protected MediaCodec mCodec;
+    protected Surface mSurface = null;
+    protected boolean mRender = false;
+    protected ArrayList<BufferInfo> mInputBufferInfo;
+    protected Stats mStats;
+    protected String mMime;
 
-    private boolean mSawInputEOS;
-    private boolean mSawOutputEOS;
-    private boolean mSignalledError;
+    protected boolean mSawInputEOS;
+    protected boolean mSawOutputEOS;
+    protected boolean mSignalledError;
 
-    private int mNumInFramesProvided;
-    private int mNumInFramesRequired;
+    protected int mNumInFramesProvided;
+    protected int mNumInFramesRequired;
 
-    private int mNumOutputFrame;
-    private int mIndex;
+    protected int mNumOutputFrame;
+    protected int mIndex;
 
-    private ArrayList<ByteBuffer> mInputBuffer;
-    private FileOutputStream mOutputStream;
-    private FrameReleaseQueue mFrameReleaseQueue = null;
-    private IBufferXfer.ISendBuffer mIBufferSend = null;
+    protected ArrayList<ByteBuffer> mInputBuffer;
+    protected FileOutputStream mOutputStream;
+    protected FrameReleaseQueue mFrameReleaseQueue = null;
+    protected IBufferXfer.ISendBuffer mIBufferSend = null;
 
     /* success for decoder */
     public static final int DECODE_SUCCESS = 0;
@@ -71,7 +74,9 @@
     @Override
     public boolean receiveBuffer(IBufferXfer.BufferXferInfo info) {
         MediaCodec codec = (MediaCodec)info.obj;
-        codec.releaseOutputBuffer(info.idx, mRender);
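+        // For batched (large audio frame) buffers the sender marks only the last access
+        // unit with isComplete, so the output buffer is released exactly once per batch.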
+        if (info.isComplete) {
+            codec.releaseOutputBuffer(info.idx, mRender);
+        }
         return true;
     }
     @Override
@@ -133,6 +138,49 @@
         }
     }
 
+    protected void setCallback(MediaCodec codec) {
+        codec.setCallback(new MediaCodec.Callback() {
+            @Override
+            public void onInputBufferAvailable(
+                    @NonNull MediaCodec mediaCodec, int inputBufferId) {
+                try {
+                    mStats.addInputTime();
+                    onInputAvailable(inputBufferId, mediaCodec);
+                } catch (Exception e) {
+                    e.printStackTrace();
+                    Log.e(TAG, e.toString());
+                }
+            }
+
+            @Override
+            public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec,
+                    int outputBufferId, @NonNull MediaCodec.BufferInfo bufferInfo) {
+                mStats.addOutputTime();
+                onOutputAvailable(mediaCodec, outputBufferId, bufferInfo);
+                if (mSawOutputEOS) {
+                    synchronized (mLock) { mLock.notify(); }
+                }
+            }
+
+            @Override
+            public void onOutputFormatChanged(
+                    @NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
+                Log.i(TAG, "Output format changed. Format: " + format.toString());
+            }
+
+            @Override
+            public void onError(
+                    @NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
+                mSignalledError = true;
+                Log.e(TAG, "Codec Error: " + e.toString());
+                e.printStackTrace();
+                synchronized (mLock) { mLock.notify(); }
+            }
+        });
+    }
+
     /**
      * Decodes the given input buffer,
      * provided valid list of buffer info and format are passed as inputs.
@@ -146,9 +194,10 @@
      *         DECODE_CREATE_ERROR for decoder not created
      * @throws IOException if the codec cannot be created.
      */
-    public int decode(@NonNull ArrayList<ByteBuffer> inputBuffer,
-            @NonNull ArrayList<BufferInfo> inputBufferInfo, final boolean asyncMode,
-            @NonNull MediaFormat format, String codecName) throws IOException {
+    public int decode(@NonNull List<ByteBuffer> inputBuffer,
+            @NonNull List<BufferInfo> inputBufferInfo, final boolean asyncMode,
+            @NonNull MediaFormat format, String codecName)
+            throws IOException, InterruptedException {
         mInputBuffer = new ArrayList<>(inputBuffer.size());
         mInputBuffer.addAll(inputBuffer);
         mInputBufferInfo = new ArrayList<>(inputBufferInfo.size());
@@ -170,64 +219,34 @@
             mFrameReleaseQueue.setMediaCodec(mCodec);
             mFrameReleaseQueue.setMime(mMime);
         }
+
         if (asyncMode) {
-            mCodec.setCallback(new MediaCodec.Callback() {
-                @Override
-                public void onInputBufferAvailable(
-                        @NonNull MediaCodec mediaCodec, int inputBufferId) {
-                    try {
-                        mStats.addInputTime();
-                        onInputAvailable(inputBufferId, mediaCodec);
-                    } catch (Exception e) {
-                        e.printStackTrace();
-                        Log.e(TAG, e.toString());
-                    }
-                }
-
-                @Override
-                public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec,
-                        int outputBufferId, @NonNull MediaCodec.BufferInfo bufferInfo) {
-                    mStats.addOutputTime();
-                    onOutputAvailable(mediaCodec, outputBufferId, bufferInfo);
-                    if (mSawOutputEOS) {
-                        synchronized (mLock) { mLock.notify(); }
-                    }
-                }
-
-                @Override
-                public void onOutputFormatChanged(
-                        @NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
-                    Log.i(TAG, "Output format changed. Format: " + format.toString());
-                }
-
-                @Override
-                public void onError(
-                        @NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
-                    mSignalledError = true;
-                    Log.e(TAG, "Codec Error: " + e.toString());
-                    e.printStackTrace();
-                    synchronized (mLock) { mLock.notify(); }
-                }
-            });
+            setCallback(mCodec);
         }
         int isEncoder = 0;
         if (DEBUG) {
             Log.d(TAG, "Media Format : " + format.toString());
         }
         mCodec.configure(format, mSurface, null, isEncoder);
+
         mCodec.start();
-        Log.i(TAG, "Codec started ");
+        Log.i(TAG, "Codec started async mode ?  " + asyncMode);
         long eTime = mStats.getCurTime();
         mStats.setInitTime(mStats.getTimeDiff(sTime, eTime));
         mStats.setStartTime();
         if (asyncMode) {
             try {
-                synchronized (mLock) { mLock.wait(); }
-                if (mSignalledError) {
-                    return DECODE_DECODER_ERROR;
+                synchronized (mLock) {
+                    while (!mSawOutputEOS && !mSignalledError) {
+                        mLock.wait();
+                    }
+                    if (mSignalledError) {
+                        return DECODE_DECODER_ERROR;
+                    }
                 }
             } catch (InterruptedException e) {
-                e.printStackTrace();
+                Log.e(TAG, "Error in waiting");
+                throw e;
             }
         } else {
             while (!mSawOutputEOS && !mSignalledError) {
@@ -319,7 +338,7 @@
         return mCodec.getOutputFormat();
     }
 
-    private void onInputAvailable(int inputBufferId, MediaCodec mediaCodec) {
+    protected void onInputAvailable(int inputBufferId, MediaCodec mediaCodec) {
         if (inputBufferId >= 0) {
             ByteBuffer inputCodecBuffer = mediaCodec.getInputBuffer(inputBufferId);
             BufferInfo bufInfo;
@@ -351,7 +370,7 @@
         }
     }
 
-    private void onOutputAvailable(
+    protected void onOutputAvailable(
             MediaCodec mediaCodec, int outputBufferId, BufferInfo outputBufferInfo) {
         if (mSawOutputEOS || outputBufferId < 0) {
             return;
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
index 63d17ee..3aa38d1 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Encoder.java
@@ -200,7 +200,8 @@
      * @throws IOException If the codec cannot be created.
      */
     public int encode(String codecName, MediaFormat encodeFormat, String mime, int frameRate,
-                      int sampleRate, int frameSize, boolean asyncMode) throws IOException {
+            int sampleRate, int frameSize, boolean asyncMode)
+            throws IOException, InterruptedException {
         mInputBufferSize = (mInputStream != null) ? mInputStream.getChannel().size() : 0;
         mOffset = 0;
         mFrameRate = frameRate;
@@ -275,12 +276,16 @@
         mStats.setStartTime();
         if (asyncMode) {
             try {
-                synchronized (mLock) { mLock.wait(); }
-                if (mSignalledError) {
-                    return ENCODE_ENCODER_ERROR;
+                synchronized (mLock) {
+                    while (!mSawOutputEOS && !mSignalledError) {
+                        mLock.wait();
+                    }
+                    if (mSignalledError) {
+                        return ENCODE_ENCODER_ERROR;
+                    }
                 }
             } catch (InterruptedException e) {
-                e.printStackTrace();
+                throw e;
             }
         } else {
             while (!mSawOutputEOS && !mSignalledError) {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Extractor.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Extractor.java
index f3024e7..1c0f810 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Extractor.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Extractor.java
@@ -76,6 +76,21 @@
     public MediaCodec.BufferInfo getBufferInfo() { return this.mBufferInfo; }
 
     /**
+     * Returns the maximum sample size for the selected track
+     * @return max sample size in the given track
+     */
+    public int getMaxSampleSize() {
+        int size = 0;
+        int maxSampleSize = 0;
+        while ((size = (int) mExtractor.getSampleSize()) != -1) {
+            maxSampleSize = Math.max(maxSampleSize, size);
+            mExtractor.advance();
+        }
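+        // Rewind the extractor so subsequent sample reads start from the beginning of the track.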
+        mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+        return maxSampleSize;
+    }
+
+    /**
      * Returns the duration of the sample
      */
     public long getClipDuration() { return this.mDurationUs; }
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
index 90731ed..20a2573 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/FrameReleaseQueue.java
@@ -185,7 +185,7 @@
                 try {
                     mCodec.releaseOutputBuffer(curFrameInfo.bufferId, actualRender);
                 } catch (IllegalStateException e) {
-                    e.printStackTrace();
+                    throw e;
                 }
             });
 
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
index a75962c..c97a35c 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXfer.java
@@ -28,6 +28,7 @@
       public Object obj;
       int flag;
       int bytesRead;
+      boolean isComplete = true;
       long presentationTimeUs;
   }
 
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
index ab55df5..3e6cee1 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/IBufferXferImpl.java
@@ -16,9 +16,9 @@
 
 package com.android.media.benchmark.library;
 
-/**
+/*
  * Class that manages the buffer senders
-*/
+ */
 import com.android.media.benchmark.library.IBufferXfer;
 import java.util.ArrayDeque;
 import android.util.Log;
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java
new file mode 100644
index 0000000..cb92f06
--- /dev/null
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/MultiAccessUnitDecoder.java
@@ -0,0 +1,223 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.media.benchmark.library;
+
+import android.view.Surface;
+
+import android.media.MediaCodec;
+import android.media.MediaCodec.BufferInfo;
+import android.media.MediaFormat;
+import android.util.Log;
+
+import androidx.annotation.NonNull;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import com.android.media.benchmark.library.IBufferXfer;
+import com.android.media.benchmark.library.Decoder;
+
+public class MultiAccessUnitDecoder extends Decoder {
+    private static final String TAG = "MultiAccessUnitDecoder";
+    private static final boolean DEBUG = false;
+    private final ArrayDeque<BufferInfo> mInputInfos = new ArrayDeque<>();
+
+    @Override
+    public void setCallback(MediaCodec codec) {
+        codec.setCallback(new MediaCodec.Callback() {
+            boolean isUsingLargeFrameMode = false;
+
+            @Override
+            public void onInputBufferAvailable(
+                    @NonNull MediaCodec mediaCodec, int inputBufferId) {
+                try {
+                    mStats.addInputTime();
+                    if (isUsingLargeFrameMode) {
+                        onInputsAvailable(inputBufferId, mediaCodec);
+                    } else {
+                        onInputAvailable(inputBufferId, mediaCodec);
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                    Log.e(TAG, e.toString());
+                }
+            }
+
+            @Override
+            public void onOutputBufferAvailable(@NonNull MediaCodec mediaCodec,
+                    int outputBufferId, @NonNull MediaCodec.BufferInfo bufferInfo) {
+                mStats.addOutputTime();
+                onOutputAvailable(mediaCodec, outputBufferId, bufferInfo);
+                if (mSawOutputEOS) {
+                    synchronized (mLock) { mLock.notify(); }
+                }
+            }
+
+            @Override
+            public void onOutputBuffersAvailable(@NonNull MediaCodec mediaCodec,
+                    int outputBufferId, @NonNull ArrayDeque<BufferInfo> infos) {
+                for (int i = 0; i < infos.size(); i++) {
+                    mStats.addOutputTime();
+                }
+                onOutputsAvailable(mediaCodec, outputBufferId, infos);
+                if (mSawOutputEOS) {
+                    synchronized (mLock) { mLock.notify(); }
+                }
+            }
+
+            @Override
+            public void onOutputFormatChanged(
+                    @NonNull MediaCodec mediaCodec, @NonNull MediaFormat format) {
+                Log.i(TAG, "Output format changed. Format: " + format.toString());
+                final int maxOutputSize = format.getNumber(
+                        MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, 0).intValue();
+                isUsingLargeFrameMode = (maxOutputSize > 0);
+            }
+
+            @Override
+            public void onError(
+                    @NonNull MediaCodec mediaCodec, @NonNull MediaCodec.CodecException e) {
+                mSignalledError = true;
+                Log.e(TAG, "Codec Error: " + e.toString());
+                e.printStackTrace();
+                synchronized (mLock) { mLock.notify(); }
+            }
+        });
+
+    }
+    /**
+     * Decodes the given input buffer,
+     * provided valid list of buffer info and format are passed as inputs.
+     *
+     * @param inputBuffer     Decode the provided list of ByteBuffers
+     * @param inputBufferInfo List of buffer info corresponding to provided input buffers
+     * @param asyncMode       Will run on async implementation if true
+     * @param format          For creating the decoder if codec name is empty and configuring it
+     * @param codecName       Will create the decoder with codecName
+     * @return DECODE_SUCCESS if decode was successful, DECODE_DECODER_ERROR for fail,
+     *         DECODE_CREATE_ERROR for decoder not created
+     * @throws IOException if the codec cannot be created.
+     */
+    @Override
+    public int decode(@NonNull List<ByteBuffer> inputBuffer,
+            @NonNull List<BufferInfo> inputBufferInfo, final boolean asyncMode,
+            @NonNull MediaFormat format, String codecName)
+            throws IOException, InterruptedException {
+        return super.decode(inputBuffer, inputBufferInfo, asyncMode, format, codecName);
+    }
+
+    private void onInputsAvailable(int inputBufferId, MediaCodec mediaCodec) {
+        if (inputBufferId >= 0) {
+            ByteBuffer inputCodecBuffer = mediaCodec.getInputBuffer(inputBufferId);
+            BufferInfo bufInfo;
+            mInputInfos.clear();
+            int offset = 0;
+            while (mNumInFramesProvided < mNumInFramesRequired) {
+                bufInfo = mInputBufferInfo.get(mIndex);
+                mSawInputEOS = (bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+                if (inputCodecBuffer.remaining() < bufInfo.size) {
+                    if (mInputInfos.size() == 0) {
+                        Log.d(TAG, "SampleSize " + inputCodecBuffer.remaining()
+                                + "greater than MediaCodec Buffer size " + bufInfo.size);
+                    }
+                    break;
+                }
+                inputCodecBuffer.put(mInputBuffer.get(mIndex).array());
+                bufInfo.offset = offset; offset += bufInfo.size;
+                mInputInfos.add(bufInfo);
+                mNumInFramesProvided++;
+                mIndex = mNumInFramesProvided % (mInputBufferInfo.size() - 1);
+            }
+            if (mNumInFramesProvided >= mNumInFramesRequired) {
+                mIndex = mInputBufferInfo.size() - 1;
+                bufInfo = mInputBufferInfo.get(mIndex);
+                if (inputCodecBuffer.remaining() > bufInfo.size) {
+                    if ((bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) == 0) {
+                        Log.e(TAG, "Error in EOS flag for Decoder");
+                    }
+                    mSawInputEOS = (bufInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+                    inputCodecBuffer.put(mInputBuffer.get(mIndex).array());
+                    bufInfo.offset = offset; offset += bufInfo.size;
+                    mInputInfos.add(bufInfo);
+                    mNumInFramesProvided++;
+                }
+            }
+            if (mInputInfos.size() == 0) {
+                Log.d(TAG, " No inputs to queue");
+            } else {
+                mStats.addFrameSize(offset);
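+                // Queue all accumulated access units in a single call; each BufferInfo
+                // describes its slice (offset/size) of the shared input buffer.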
+                mediaCodec.queueInputBuffers(inputBufferId, mInputInfos);
+            }
+        }
+    }
+
+    private void onOutputsAvailable(MediaCodec mc, int outputBufferId,
+            ArrayDeque<BufferInfo> infos) {
+        if (mSawOutputEOS || outputBufferId < 0) {
+            return;
+        }
+        Iterator<BufferInfo> iter = infos.iterator();
+        while (iter.hasNext()) {
+            BufferInfo bufferInfo = iter.next();
+            mNumOutputFrame++;
+            if (DEBUG) {
+                Log.d(TAG,
+                        "In OutputBufferAvailable ,"
+                                + " output frame number = " + mNumOutputFrame
+                                + " timestamp = " + bufferInfo.presentationTimeUs
+                                + " size = " + bufferInfo.size);
+            }
+            if (mIBufferSend != null) {
+                IBufferXfer.BufferXferInfo info = new IBufferXfer.BufferXferInfo();
+                info.buf = mc.getOutputBuffer(outputBufferId);
+                info.idx = outputBufferId;
+                info.obj = mc;
+                info.bytesRead = bufferInfo.size;
+                info.presentationTimeUs = bufferInfo.presentationTimeUs;
+                info.flag = bufferInfo.flags;
+                info.isComplete = !iter.hasNext();
+                mIBufferSend.sendBuffer(this, info);
+            }
+            mSawOutputEOS |= (bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
+        }
+        if (mOutputStream != null) {
+            try {
+                ByteBuffer outputBuffer = mc.getOutputBuffer(outputBufferId);
+                byte[] bytesOutput = new byte[outputBuffer.remaining()];
+                outputBuffer.get(bytesOutput);
+                mOutputStream.write(bytesOutput);
+            } catch (IOException e) {
+                e.printStackTrace();
+                Log.d(TAG, "Error Dumping File: Exception " + e.toString());
+            }
+        }
+        if (mIBufferSend == null) {
+            mc.releaseOutputBuffer(outputBufferId, mRender);
+        }
+        if (mSawOutputEOS) {
+            Log.i(TAG, "Large frame - saw output EOS");
+        }
+        // we don't support frame release queue for large audio frame
+    }
+}
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Muxer.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Muxer.java
index 340b539..786290d 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Muxer.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Muxer.java
@@ -23,6 +23,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.List;
 
 public class Muxer {
     private Stats mStats;
@@ -61,8 +62,8 @@
      * @param inputBufferInfo      Buffer information related to these samples
      * @return Returns Status as 0 if write operation is successful, -1 otherwise
      */
-    public int mux(int trackIndex, ArrayList<ByteBuffer> inputExtractedBuffer,
-                   ArrayList<MediaCodec.BufferInfo> inputBufferInfo) {
+    public int mux(int trackIndex, List<ByteBuffer> inputExtractedBuffer,
+                   List<MediaCodec.BufferInfo> inputBufferInfo) {
         mStats.setStartTime();
         for (int sampleCount = 0; sampleCount < inputExtractedBuffer.size(); sampleCount++) {
             try {
diff --git a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
index 0ebf798..17de1e7 100644
--- a/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
+++ b/media/tests/benchmark/MediaBenchmarkTest/src/main/java/com/android/media/benchmark/library/Stats.java
@@ -23,6 +23,7 @@
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.List;
 
 /**
  * Measures Performance.
@@ -88,9 +89,9 @@
 
     public long getStartTime() { return mStartTimeNs; }
 
-    public ArrayList<Long> getOutputTimers() { return mOutputTimer; }
+    public List<Long> getOutputTimers() { return mOutputTimer; }
 
-    public ArrayList<Long> getInputTimers() { return mInputTimer; }
+    public List<Long> getInputTimers() { return mInputTimer; }
 
     public long getTimeDiff(long sTime, long eTime) { return (eTime - sTime); }
 
diff --git a/media/utils/Android.bp b/media/utils/Android.bp
index 5b7319a..e340b40 100644
--- a/media/utils/Android.bp
+++ b/media/utils/Android.bp
@@ -113,6 +113,7 @@
 
     export_shared_lib_headers: [
         "libpermission",
+        "packagemanager_aidl-cpp",
     ],
 
     required: [
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 4b0192a..e13f8f7 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -41,13 +41,16 @@
 
 namespace android {
 
+namespace {
+constexpr auto PERMISSION_HARD_DENIED = permission::PermissionChecker::PERMISSION_HARD_DENIED;
+}
+
 using content::AttributionSourceState;
 
 static const String16 sAndroidPermissionRecordAudio("android.permission.RECORD_AUDIO");
 static const String16 sModifyPhoneState("android.permission.MODIFY_PHONE_STATE");
 static const String16 sModifyAudioRouting("android.permission.MODIFY_AUDIO_ROUTING");
 static const String16 sCallAudioInterception("android.permission.CALL_AUDIO_INTERCEPTION");
-static const String16 sAndroidPermissionBluetoothConnect("android.permission.BLUETOOTH_CONNECT");
 
 static String16 resolveCallingPackage(PermissionController& permissionController,
         const std::optional<String16> opPackageName, uid_t uid) {
@@ -116,7 +119,7 @@
     return std::optional<AttributionSourceState>{myAttributionSource};
 }
 
-    static bool checkRecordingInternal(const AttributionSourceState &attributionSource,
+    static int checkRecordingInternal(const AttributionSourceState &attributionSource,
                                        const uint32_t virtualDeviceId,
                                        const String16 &msg, bool start, audio_source_t source) {
     // Okay to not track in app ops as audio server or media server is us and if
@@ -139,15 +142,15 @@
     const int32_t attributedOpCode = getOpForSource(source);
 
     permission::PermissionChecker permissionChecker;
-    bool permitted = false;
+    int permitted;
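+    // 'permitted' now carries the raw PermissionChecker result (granted / soft denied /
+    // hard denied); callers such as recordingAllowed() compare it to PERMISSION_HARD_DENIED.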
     if (start) {
-        permitted = (permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
+        permitted = permissionChecker.checkPermissionForStartDataDeliveryFromDatasource(
                 sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
-                attributedOpCode) != permission::PermissionChecker::PERMISSION_HARD_DENIED);
+                attributedOpCode);
     } else {
-        permitted = (permissionChecker.checkPermissionForPreflightFromDatasource(
+        permitted = permissionChecker.checkPermissionForPreflightFromDatasource(
                 sAndroidPermissionRecordAudio, resolvedAttributionSource.value(), msg,
-                attributedOpCode) != permission::PermissionChecker::PERMISSION_HARD_DENIED);
+                attributedOpCode);
     }
 
     return permitted;
@@ -157,17 +160,17 @@
 
 bool recordingAllowed(const AttributionSourceState &attributionSource, audio_source_t source) {
     return checkRecordingInternal(attributionSource, DEVICE_ID_DEFAULT, String16(), /*start*/ false,
-                                  source);
+                                  source) != PERMISSION_HARD_DENIED;
 }
 
 bool recordingAllowed(const AttributionSourceState &attributionSource,
                       const uint32_t virtualDeviceId,
                       audio_source_t source) {
     return checkRecordingInternal(attributionSource, virtualDeviceId,
-                                  String16(), /*start*/ false, source);
+                                  String16(), /*start*/ false, source) != PERMISSION_HARD_DENIED;
 }
 
-bool startRecording(const AttributionSourceState& attributionSource,
+int startRecording(const AttributionSourceState& attributionSource,
                     const uint32_t virtualDeviceId,
                     const String16& msg,
                     audio_source_t source) {
@@ -287,7 +290,7 @@
 bool modifyAudioRoutingAllowed(const AttributionSourceState& attributionSource) {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
-    if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
+    if (isAudioServerUid(uid)) return true;
     // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
     bool ok = PermissionCache::checkPermission(sModifyAudioRouting, pid, uid);
     if (!ok) ALOGE("%s(): android.permission.MODIFY_AUDIO_ROUTING denied for uid %d",
@@ -302,7 +305,7 @@
 bool modifyDefaultAudioEffectsAllowed(const AttributionSourceState& attributionSource) {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
-    if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
+    if (isAudioServerUid(uid)) return true;
 
     static const String16 sModifyDefaultAudioEffectsAllowed(
             "android.permission.MODIFY_DEFAULT_AUDIO_EFFECTS");
@@ -393,48 +396,6 @@
     return NO_ERROR;
 }
 
-/**
- * Determines if the MAC address in Bluetooth device descriptors returned by APIs of
- * a native audio service (audio flinger, audio policy) must be anonymized.
- * MAC addresses returned to system server or apps with BLUETOOTH_CONNECT permission
- * are not anonymized.
- *
- * @param attributionSource The attribution source of the calling app.
- * @param caller string identifying the caller for logging.
- * @return true if the MAC addresses must be anonymized, false otherwise.
- */
-bool mustAnonymizeBluetoothAddress(
-        const AttributionSourceState& attributionSource, const String16& caller) {
-    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
-    if (isAudioServerOrSystemServerUid(uid)) {
-        return false;
-    }
-    const std::optional<AttributionSourceState> resolvedAttributionSource =
-            resolveAttributionSource(attributionSource, DEVICE_ID_DEFAULT);
-    if (!resolvedAttributionSource.has_value()) {
-        return true;
-    }
-    permission::PermissionChecker permissionChecker;
-    return permissionChecker.checkPermissionForPreflightFromDatasource(
-            sAndroidPermissionBluetoothConnect, resolvedAttributionSource.value(), caller,
-            AppOpsManager::OP_BLUETOOTH_CONNECT)
-                != permission::PermissionChecker::PERMISSION_GRANTED;
-}
-
-/**
- * Modifies the passed MAC address string in place for consumption by unprivileged clients.
- * the string is assumed to have a valid MAC address format.
- * the anonymzation must be kept in sync with toAnonymizedAddress() in BluetoothUtils.java
- *
- * @param address input/output the char string contining the MAC address to anonymize.
- */
-void anonymizeBluetoothAddress(char *address) {
-    if (address == nullptr || strlen(address) != strlen("AA:BB:CC:DD:EE:FF")) {
-        return;
-    }
-    memcpy(address, "XX:XX:XX:XX", strlen("XX:XX:XX:XX"));
-}
-
 sp<content::pm::IPackageManagerNative> MediaPackageManager::retrievePackageManager() {
     const sp<IServiceManager> sm = defaultServiceManager();
     if (sm == nullptr) {
@@ -516,35 +477,38 @@
     }
 }
 
+namespace mediautils {
+
 // How long we hold info before we re-fetch it (24 hours) if we found it previously.
 static constexpr nsecs_t INFO_EXPIRATION_NS = 24 * 60 * 60 * NANOS_PER_SECOND;
 // Maximum info records we retain before clearing everything.
 static constexpr size_t INFO_CACHE_MAX = 1000;
 
 // The original code is from MediaMetricsService.cpp.
-mediautils::UidInfo::Info mediautils::UidInfo::getInfo(uid_t uid)
+std::shared_ptr<const UidInfo::Info> UidInfo::getCachedInfo(uid_t uid)
 {
+    std::shared_ptr<const UidInfo::Info> info;
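+    // Cached entries are now immutable and shared. A null 'info' after the lookup below
+    // means a cache miss or an expired entry, and triggers a refetch from package manager.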
+
     const nsecs_t now = systemTime(SYSTEM_TIME_REALTIME);
-    struct mediautils::UidInfo::Info info;
     {
         std::lock_guard _l(mLock);
         auto it = mInfoMap.find(uid);
         if (it != mInfoMap.end()) {
             info = it->second;
             ALOGV("%s: uid %d expiration %lld now %lld",
-                    __func__, uid, (long long)info.expirationNs, (long long)now);
-            if (info.expirationNs <= now) {
+                    __func__, uid, (long long)info->expirationNs, (long long)now);
+            if (info->expirationNs <= now) {
                 // purge the stale entry and fall into re-fetching
                 ALOGV("%s: entry for uid %d expired, now %lld",
                         __func__, uid, (long long)now);
                 mInfoMap.erase(it);
-                info.uid = (uid_t)-1;  // this is always fully overwritten
+                info.reset();  // force refetch
             }
         }
     }
 
     // if we did not find it in our map, look it up
-    if (info.uid == (uid_t)(-1)) {
+    if (!info) {
         sp<IServiceManager> sm = defaultServiceManager();
         sp<content::pm::IPackageManagerNative> package_mgr;
         if (sm.get() == nullptr) {
@@ -629,17 +593,30 @@
         // first clear if we have too many cached elements.  This would be rare.
         if (mInfoMap.size() >= INFO_CACHE_MAX) mInfoMap.clear();
 
-        // always overwrite
-        info.uid = uid;
-        info.package = std::move(pkg);
-        info.installer = std::move(installer);
-        info.versionCode = versionCode;
-        info.expirationNs = now + (notFound ? 0 : INFO_EXPIRATION_NS);
+        info = std::make_shared<const UidInfo::Info>(
+                uid,
+                std::move(pkg),
+                std::move(installer),
+                versionCode,
+                now + (notFound ? 0 : INFO_EXPIRATION_NS));
         ALOGV("%s: adding uid %d package '%s' expirationNs: %lld",
-                __func__, uid, info.package.c_str(), (long long)info.expirationNs);
+                __func__, uid, info->package.c_str(), (long long)info->expirationNs);
         mInfoMap[uid] = info;
     }
     return info;
 }
 
+/* static */
+UidInfo& UidInfo::getUidInfo() {
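+    // Process-wide singleton; no_destroy keeps it alive through process exit to avoid
+    // destruction-order issues with late callers.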
+    [[clang::no_destroy]] static UidInfo uidInfo;
+    return uidInfo;
+}
+
+/* static */
+std::shared_ptr<const UidInfo::Info> UidInfo::getInfo(uid_t uid) {
+    return UidInfo::getUidInfo().getCachedInfo(uid);
+}
+
+} // namespace mediautils
+
 } // namespace android
diff --git a/media/utils/TimeCheck.cpp b/media/utils/TimeCheck.cpp
index ec68de7..6a5bbbe 100644
--- a/media/utils/TimeCheck.cpp
+++ b/media/utils/TimeCheck.cpp
@@ -23,6 +23,7 @@
 #include <android-base/logging.h>
 #include <android-base/strings.h>
 #include <audio_utils/clock.h>
+#include <cutils/properties.h>
 #include <mediautils/EventLog.h>
 #include <mediautils/FixedString.h>
 #include <mediautils/MethodStatistics.h>
@@ -36,6 +37,46 @@
 
 
 namespace android::mediautils {
+
+// Note: The sum of kDefaultTimeoutDurationMs and kDefaultSecondChanceDurationMs
+// should be no less than 2 seconds, otherwise spurious timeouts
+// may occur with system suspend.
+static constexpr int kDefaultTimeoutDurationMs = 3000;
+
+// Due to suspend abort not incrementing the monotonic clock,
+// we allow another second chance timeout after the first timeout expires.
+//
+// The total timeout is therefore kDefaultTimeoutDuration + kDefaultSecondChanceDuration,
+// and the result is more stable when the monotonic clock increments during suspend.
+//
+static constexpr int kDefaultSecondChanceDurationMs = 2000;
+
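+// Both defaults are read from system properties once and then cached. For debugging,
+// they can be overridden on a device before audioserver starts, e.g. (illustrative values):
+//   adb shell setprop audio.timecheck.timeout_duration_ms 5000
+//   adb shell setprop audio.timecheck.second_chance_duration_ms 3000
+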
+/* static */
+TimeCheck::Duration TimeCheck::getDefaultTimeoutDuration() {
+    static constinit std::atomic<int> defaultTimeoutDurationMs{};
+    auto defaultMs = defaultTimeoutDurationMs.load(std::memory_order_relaxed);
+    if (defaultMs == 0) {
+        defaultMs = property_get_int32(
+                "audio.timecheck.timeout_duration_ms", kDefaultTimeoutDurationMs);
+        if (defaultMs < 1) defaultMs = kDefaultTimeoutDurationMs;
+        defaultTimeoutDurationMs.store(defaultMs, std::memory_order_relaxed);
+    }
+    return std::chrono::milliseconds(defaultMs);
+}
+
+/* static */
+TimeCheck::Duration TimeCheck::getDefaultSecondChanceDuration() {
+    static constinit std::atomic<int> defaultSecondChanceDurationMs{};
+    auto defaultMs = defaultSecondChanceDurationMs.load(std::memory_order_relaxed);
+    if (defaultMs == 0) {
+        defaultMs = property_get_int32(
+                "audio.timecheck.second_chance_duration_ms", kDefaultSecondChanceDurationMs);
+        if (defaultMs < 1) defaultMs = kDefaultSecondChanceDurationMs;
+        defaultSecondChanceDurationMs.store(defaultMs, std::memory_order_relaxed);
+    }
+    return std::chrono::milliseconds(defaultMs);
+}
+
 // This function appropriately signals a pid to dump a backtrace if we are
 // running on device (and the HAL exists). If we are not running on an Android
 // device, there is no HAL to signal (so we do nothing).
@@ -143,6 +184,22 @@
 }
 
 /* static */
+std::string TimeCheck::signalAudioHals() {
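+    // Signals each registered audio HAL process to dump a tombstone and returns the
+    // space-separated list of pids signalled (empty if no HAL pids are registered).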
+    std::vector<pid_t> pids = getAudioHalPids();
+    std::string halPids;
+    if (pids.size() != 0) {
+        for (const auto& pid : pids) {
+            ALOGI("requesting tombstone for pid: %d", pid);
+            halPids.append(std::to_string(pid)).append(" ");
+            signalAudioHAL(pid);
+        }
+        // Allow time to complete; usually the caller forces a restart afterwards.
+        sleep(1);
+    }
+    return halPids;
+}
+
+/* static */
 TimerThread& TimeCheck::getTimeCheckThread() {
     static TimerThread sTimeCheckThread{};
     return sTimeCheckThread;
@@ -182,23 +239,25 @@
 
 /* static */
 std::string TimeCheck::analyzeTimeouts(
-        float requestedTimeoutMs, float elapsedSteadyMs, float elapsedSystemMs) {
+        float requestedTimeoutMs, float secondChanceMs,
+        float elapsedSteadyMs, float elapsedSystemMs) {
     // Track any OS clock issues with suspend.
     // It is possible that the elapsedSystemMs is much greater than elapsedSteadyMs if
     // a suspend occurs; however, we always expect the timeout ms should always be slightly
     // less than the elapsed steady ms regardless of whether a suspend occurs or not.
 
-    std::string s("Timeout ms ");
-    s.append(std::to_string(requestedTimeoutMs))
-        .append(" elapsed steady ms ").append(std::to_string(elapsedSteadyMs))
-        .append(" elapsed system ms ").append(std::to_string(elapsedSystemMs));
+    const float totalTimeoutMs = requestedTimeoutMs + secondChanceMs;
+    std::string s = std::format(
+            "Timeout ms {:.2f} ({:.2f} + {:.2f})"
+            " elapsed steady ms {:.4f} elapsed system ms {:.4f}",
+            totalTimeoutMs, requestedTimeoutMs, secondChanceMs, elapsedSteadyMs, elapsedSystemMs);
 
     // Is there something unusual?
     static constexpr float TOLERANCE_CONTEXT_SWITCH_MS = 200.f;
 
-    if (requestedTimeoutMs > elapsedSteadyMs || requestedTimeoutMs > elapsedSystemMs) {
+    if (totalTimeoutMs > elapsedSteadyMs || totalTimeoutMs > elapsedSystemMs) {
         s.append("\nError: early expiration - "
-                "requestedTimeoutMs should be less than elapsed time");
+                "totalTimeoutMs should be less than elapsed time");
     }
 
     if (elapsedSteadyMs > elapsedSystemMs + TOLERANCE_CONTEXT_SWITCH_MS) {
@@ -206,13 +265,13 @@
     }
 
     // This has been found in suspend stress testing.
-    if (elapsedSteadyMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+    if (elapsedSteadyMs > totalTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
         s.append("\nWarning: steady time significantly exceeds timeout "
                 "- possible thread stall or aborted suspend");
     }
 
     // This has been found in suspend stress testing.
-    if (elapsedSystemMs > requestedTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
+    if (elapsedSystemMs > totalTimeoutMs + TOLERANCE_CONTEXT_SWITCH_MS) {
         s.append("\nInformation: system time significantly exceeds timeout "
                 "- possible suspend");
     }
@@ -259,21 +318,14 @@
     // HAL processes which can affect thread behavior.
     const auto snapshotAnalysis = getTimeCheckThread().getSnapshotAnalysis(4 /* retiredCount */);
 
-    // Generate audio HAL processes tombstones and allow time to complete
-    // before forcing restart
-    std::vector<pid_t> pids = TimeCheck::getAudioHalPids();
-    std::string halPids = "HAL pids [ ";
-    if (pids.size() != 0) {
-        for (const auto& pid : pids) {
-            ALOGI("requesting tombstone for pid: %d", pid);
-            halPids.append(std::to_string(pid)).append(" ");
-            signalAudioHAL(pid);
-        }
-        sleep(1);
+    // Generate audio HAL processes tombstones.
+    std::string halPids = signalAudioHals();
+    if (!halPids.empty()) {
+        halPids = "HAL pids [ " + halPids + "]";
     } else {
-        ALOGI("No HAL process pid available, skipping tombstones");
+        halPids = "No HAL process pids available";
+        ALOGI("%s", (halPids + ", skipping tombstones").c_str());
     }
-    halPids.append("]");
 
     LOG_EVENT_STRING(LOGTAG_AUDIO_BINDER_TIMEOUT, tag.c_str());
 
@@ -282,7 +334,7 @@
             .append(tag)
             .append(" scheduled ").append(formatTime(startSystemTime))
             .append(" on thread ").append(std::to_string(tid)).append("\n")
-            .append(analyzeTimeouts(requestedTimeoutMs + secondChanceMs,
+            .append(analyzeTimeouts(requestedTimeoutMs, secondChanceMs,
                     elapsedSteadyMs, elapsedSystemMs)).append("\n")
             .append(halPids).append("\n")
             .append(snapshotAnalysis.toString());
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 9c02cd4..2631469 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -20,6 +20,7 @@
 #include <unistd.h>
 
 #include <android/content/pm/IPackageManagerNative.h>
+#include <android-base/thread_annotations.h>
 #include <binder/IMemory.h>
 #include <binder/PermissionController.h>
 #include <cutils/multiuser.h>
@@ -91,7 +92,7 @@
 bool recordingAllowed(const AttributionSourceState &attributionSource,
                       uint32_t virtualDeviceId,
                       audio_source_t source);
-bool startRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
+int startRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
                     const String16& msg, audio_source_t source);
 void finishRecording(const AttributionSourceState& attributionSource, uint32_t virtualDeviceId,
                      audio_source_t source);
@@ -113,10 +114,6 @@
 bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource);
 bool callAudioInterceptionAllowed(const AttributionSourceState& attributionSource);
 void purgePermissionCache();
-bool mustAnonymizeBluetoothAddress(
-        const AttributionSourceState& attributionSource, const String16& caller);
-void anonymizeBluetoothAddress(char *address);
-
 int32_t getOpForSource(audio_source_t source);
 
 AttributionSourceState getCallingAttributionSource();
@@ -171,12 +168,18 @@
      *
      * \param uid is the uid of the app or service.
      */
-    Info getInfo(uid_t uid);
+    std::shared_ptr<const Info> getCachedInfo(uid_t uid);
+
+    /* returns the singleton instance */
+    static UidInfo& getUidInfo();
+
+    /* returns a non-null pointer to a const Info struct */
+    static std::shared_ptr<const Info> getInfo(uid_t uid);
 
 private:
     std::mutex mLock;
     // TODO: use concurrent hashmap with striped lock.
-    std::unordered_map<uid_t, Info> mInfoMap; // GUARDED_BY(mLock)
+    std::unordered_map<uid_t, std::shared_ptr<const Info>> mInfoMap GUARDED_BY(mLock);
 };
 
 } // namespace mediautils
diff --git a/media/utils/include/mediautils/TimeCheck.h b/media/utils/include/mediautils/TimeCheck.h
index f1d572f..c112863 100644
--- a/media/utils/include/mediautils/TimeCheck.h
+++ b/media/utils/include/mediautils/TimeCheck.h
@@ -42,19 +42,29 @@
     //  float elapsedMs (the elapsed time to this event).
     using OnTimerFunc = std::function<void(bool /* timeout */, float /* elapsedMs */ )>;
 
-    // The default timeout is chosen to be less than system server watchdog timeout
-    // Note: kDefaultTimeOutMs should be no less than 2 seconds, otherwise spurious timeouts
-    // may occur with system suspend.
-    static constexpr TimeCheck::Duration kDefaultTimeoutDuration = std::chrono::milliseconds(3000);
+    /**
+     * Returns the default timeout to use for TimeCheck.
+     *
+     * The default timeout of 3000ms (kDefaultTimeoutDurationMs) is chosen to be less than
+     * the system server watchdog timeout, and can be changed by the sysprop
+     * audio.timecheck.timeout_duration_ms.
+     * A second chance wait may be set to extend the check.
+     */
+    static TimeCheck::Duration getDefaultTimeoutDuration();
 
-    // Due to suspend abort not incrementing the monotonic clock,
-    // we allow another second chance timeout after the first timeout expires.
-    //
-    // The total timeout is therefore kDefaultTimeoutDuration + kDefaultSecondChanceDuration,
-    // and the result is more stable when the monotonic clock increments during suspend.
-    //
-    static constexpr TimeCheck::Duration kDefaultSecondChanceDuration =
-            std::chrono::milliseconds(2000);
+    /**
+     * Returns the second chance timeout to use for TimeCheck.
+     *
+     * Due to suspend abort not incrementing the monotonic clock,
+     * we allow another second chance timeout after the first timeout expires.
+     * The second chance timeout default of 2000ms (kDefaultSecondChanceDurationMs)
+     * may be changed by the sysprop audio.timecheck.second_chance_duration_ms.
+     *
+     * The total timeout is therefore
+     * getDefaultTimeoutDuration() + getDefaultSecondChanceDuration(),
+     * and the result is more stable when the monotonic clock increments during suspend.
+     */
+    static TimeCheck::Duration getDefaultSecondChanceDuration();
 
     /**
      * TimeCheck is a RAII object which will notify a callback
@@ -97,6 +107,7 @@
     static std::string toString();
     static void setAudioHalPids(const std::vector<pid_t>& pids);
     static std::vector<pid_t> getAudioHalPids();
+    static std::string signalAudioHals();
 
   private:
     // Helper class for handling events.
@@ -130,7 +141,8 @@
     // Returns a string that represents the timeout vs elapsed time,
     // and diagnostics if there are any potential issues.
     static std::string analyzeTimeouts(
-            float timeoutMs, float elapsedSteadyMs, float elapsedSystemMs);
+            float timeoutMs, float secondChanceMs,
+            float elapsedSteadyMs, float elapsedSystemMs);
 
     static TimerThread& getTimeCheckThread();
     static void accessAudioHalPids(std::vector<pid_t>* pids, bool update);
diff --git a/media/utils/include/mediautils/jthread.h b/media/utils/include/mediautils/jthread.h
new file mode 100644
index 0000000..17532a4
--- /dev/null
+++ b/media/utils/include/mediautils/jthread.h
@@ -0,0 +1,92 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <atomic>
+#include <thread>
+#include <utility>
+
+namespace android::mediautils {
+
+namespace impl {
+class stop_source;
+/**
+ * Const view on stop source, which the running thread uses and an interface
+ * for cancellation.
+ */
+class stop_token {
+  public:
+    stop_token(const stop_source& source) : stop_source_(source) {}
+    bool stop_requested() const;
+
+  private:
+    const stop_source& stop_source_;
+};
+
+class stop_source {
+  public:
+    stop_token get_token() { return stop_token{*this}; }
+    bool stop_requested() const { return cancellation_signal_.load(); }
+    bool request_stop() {
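+        // Returns true only for the first caller to request a stop (CAS false -> true).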
+        auto f = false;
+        return cancellation_signal_.compare_exchange_strong(f, true);
+    }
+
+  private:
+    std::atomic_bool cancellation_signal_ = false;
+};
+
+inline bool stop_token::stop_requested() const {
+    return stop_source_.stop_requested();
+}
+}  // namespace impl
+
+using stop_token = impl::stop_token;
+/**
+ * Just a jthread, since std::jthread is still experimental in our toolchain.
+ * Implements a subset of essential functionality (co-op cancellation and join on dtor).
+ * If jthread gets picked up, usage can be cut over.
+ */
+class jthread {
+  public:
+    /**
+     * Construct and launch a thread with a callable which consumes a stop_token.
+     * The callable must be cooperatively cancellable via stop_token::stop_requested(), and will be
+     * automatically stopped and then joined on destruction.
+     * Example:
+     * jthread([](stop_token stok) {
+     *     while (!stok.stop_requested()) {
+     *         // do work
+     *     }
+     * });
+     */
+    template <typename F>
+    jthread(F&& f) : stop_source_{}, thread_{std::forward<F>(f), stop_source_.get_token()} {}
+
+    ~jthread() {
+        stop_source_.request_stop();
+        thread_.join();
+    }
+
+    bool request_stop() { return stop_source_.request_stop(); }
+
+  private:
+    // order matters
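+    // (stop_source_ must be constructed before, and destroyed after, thread_: the running
+    // callable holds a stop_token that references it)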
+    impl::stop_source stop_source_;
+    std::thread thread_;
+};
+}  // namespace android::mediautils
diff --git a/media/utils/tests/Android.bp b/media/utils/tests/Android.bp
index a68569a..ff11b42 100644
--- a/media/utils/tests/Android.bp
+++ b/media/utils/tests/Android.bp
@@ -237,3 +237,11 @@
         "shared_memory_allocator_tests.cpp",
     ],
 }
+
+cc_test {
+    name: "jthread_tests",
+    defaults: ["libmediautils_tests_defaults"],
+    srcs: [
+        "jthread_tests.cpp",
+    ],
+}
diff --git a/media/utils/tests/jthread_tests.cpp b/media/utils/tests/jthread_tests.cpp
new file mode 100644
index 0000000..ed77c27
--- /dev/null
+++ b/media/utils/tests/jthread_tests.cpp
@@ -0,0 +1,70 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "jthread_tests"
+
+#include <mediautils/jthread.h>
+
+#include <gtest/gtest.h>
+
+#include <atomic>
+
+using namespace android::mediautils;
+
+namespace {
+TEST(jthread_tests, dtor) {
+    std::atomic_int x = 0;
+    std::atomic_bool is_stopped = false;
+    {
+        auto jt = jthread([&](stop_token stok) {
+            while (!stok.stop_requested()) {
+                if (x.load() < std::numeric_limits<int>::max())
+                    x++;
+            }
+            is_stopped = true;
+        });
+        while (x.load() < 1000)
+            ;
+    }
+    // Check we triggered a stop on dtor
+    ASSERT_TRUE(is_stopped.load());
+    // Check we actually ran
+    ASSERT_GE(x.load(), 1000);
+}
+TEST(jthread_tests, request_stop) {
+    std::atomic_int x = 0;
+    std::atomic_bool is_stopped = false;
+    auto jt = jthread([&](stop_token stok) {
+        while (!stok.stop_requested()) {
+            if (x.load() < std::numeric_limits<int>::max())
+                x++;
+        }
+        is_stopped = true;
+    });
+    while (x.load() < 1000)
+        ;
+    // request stop manually
+    ASSERT_TRUE(jt.request_stop());
+    // busy loop till thread acks
+    while (!is_stopped.load())
+        ;
+    // Check we triggered a stop on dtor
+    ASSERT_TRUE(is_stopped.load());
+    // Check we actually ran
+    ASSERT_GE(x.load(), 1000);
+}
+
+}  // namespace
diff --git a/media/utils/tests/static_string_view_tests.cpp b/media/utils/tests/static_string_view_tests.cpp
index c00de68..1dd2370 100644
--- a/media/utils/tests/static_string_view_tests.cpp
+++ b/media/utils/tests/static_string_view_tests.cpp
@@ -37,14 +37,12 @@
     // const std::array<char,2> nonstatic = {'a', 'b'};
     // static_assert(can_assign<nonstatic>::value == false);
     static std::array<char, 2> nonconst = {'a', 'b'};
-    static const std::array<char, 2> nonconstexpr = {'a', 'b'};
     static constexpr std::array<int, 2> nonchar = {1, 2};
     static constexpr size_t nonarray = 2;
 
     static_assert(CanCreate<nonconst>::value == false);
     static_assert(CanCreate<nonarray>::value == false);
     static_assert(CanCreate<nonchar>::value == false);
-    static_assert(CanCreate<nonconstexpr>::value == false);
 
     static constexpr std::array<char, 2> scoped = {'a', 'b'};
     constexpr StaticStringView Ticket1 = StaticStringView::create<global>();
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index 9016420..01bde42 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -141,11 +141,18 @@
 cc_defaults {
     name: "libaudioflinger_dependencies",
 
+    header_libs: [
+        "libaudiohal_headers", // required for AudioFlinger
+    ],
+
     shared_libs: [
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
+        "audiopermissioncontroller",
         "av-types-aidl-cpp",
         "com.android.media.audio-aconfig-cc",
+        "com.android.media.audioserver-aconfig-cc",
         "effect-aidl-cpp",
         "libactivitymanager_aidl",
         "libaudioclient",
@@ -177,11 +184,6 @@
         "libvibrator",
         "packagemanager_aidl-cpp",
     ],
-
-    static_libs: [
-        "libaudiospdif",
-        "libmedialogservice",
-    ],
 }
 
 cc_library {
@@ -213,22 +215,41 @@
     ],
 
     static_libs: [
+        "libaudiospdif",
         "libcpustats",
-        "libpermission",
+        "libmedialogservice",
     ],
 
     header_libs: [
+        "audiopolicyservicelocal_headers",
         "libaaudio_headers",
-        "libaudioclient_headers",
-        "libaudiohal_headers",
-        "libaudioutils_headers",
         "libmedia_headers",
     ],
 
+    export_header_lib_headers: ["audiopolicyservicelocal_headers"],
+
+    export_include_dirs: ["."],
+
     export_shared_lib_headers: [
         "libpermission",
     ],
 
+    export_static_lib_headers: [
+        "libpshutils",
+    ],
+
+    shared: {
+        static_libs: [
+            "libpshutils",
+        ],
+    },
+
+    static: {
+        whole_static_libs: [
+            "libpshutils",
+        ],
+    },
+
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index d5d778f..4c7087e 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -17,6 +17,8 @@
 
 #define LOG_TAG "AudioFlinger"
 //#define LOG_NDEBUG 0
+#define ATRACE_TAG ATRACE_TAG_AUDIO
+#include <utils/Trace.h>
 
 // Define AUDIO_ARRAYS_STATIC_CHECK to check all audio arrays are correct
 #define AUDIO_ARRAYS_STATIC_CHECK 1
@@ -39,13 +41,17 @@
 #include <binder/IServiceManager.h>
 #include <binder/Parcel.h>
 #include <cutils/properties.h>
+#include <com_android_media_audio.h>
 #include <com_android_media_audioserver.h>
 #include <media/AidlConversion.h>
 #include <media/AudioParameter.h>
 #include <media/AudioValidator.h>
 #include <media/IMediaLogService.h>
+#include <media/IPermissionProvider.h>
 #include <media/MediaMetricsItem.h>
+#include <media/NativePermissionController.h>
 #include <media/TypeConverter.h>
+#include <media/ValidatedAttributionSourceState.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/MemoryLeakTrackUtil.h>
 #include <mediautils/MethodStatistics.h>
@@ -81,12 +87,17 @@
 namespace android {
 
 using ::android::base::StringPrintf;
+using aidl_utils::statusTFromBinderStatus;
 using media::IEffectClient;
 using media::audio::common::AudioMMapPolicyInfo;
 using media::audio::common::AudioMMapPolicyType;
 using media::audio::common::AudioMode;
 using android::content::AttributionSourceState;
 using android::detail::AudioHalVersionInfo;
+using com::android::media::permission::INativePermissionController;
+using com::android::media::permission::IPermissionProvider;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::ValidatedAttributionSourceState;
 
 static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
         AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1);
@@ -118,6 +129,52 @@
     }
 }
 
+static error::BinderResult<ValidatedAttributionSourceState>
+validateAttributionFromContextOrTrustedCaller(AttributionSourceState attr,
+        const IPermissionProvider& provider) {
+    const auto callingUid = IPCThreadState::self()->getCallingUid();
+    // We trust the following UIDs to appropriate validated identities above us
+    if (isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+        // Legacy paths may not properly populate package name, so we attempt to handle.
+        if (!attr.packageName.has_value() || attr.packageName.value() == "") {
+            ALOGW("Trusted client %d provided attr with missing package name" , callingUid);
+            attr.packageName = VALUE_OR_RETURN(provider.getPackagesForUid(callingUid))[0];
+        }
+        // Behavior change: In the case of delegation, if pid is invalid,
+        // filling it in with the callingPid will cause a mismatch between the
+        // pid and the uid in the attribution, which is error-prone.
+        // Instead, assert that the pid from a trusted source is valid
+        if (attr.pid == -1) {
+            if (callingUid != static_cast<uid_t>(attr.uid)) {
+                return error::unexpectedExceptionCode(binder::Status::EX_ILLEGAL_ARGUMENT,
+                        "validateAttribution: Invalid pid from delegating trusted source");
+            } else {
+                // Legacy handling for trusted clients which may not fill pid correctly
+                attr.pid = IPCThreadState::self()->getCallingPid();
+            }
+        }
+        return ValidatedAttributionSourceState::createFromTrustedSource(std::move(attr));
+    } else {
+        // Behavior change: Populate pid with callingPid unconditionally. Previously, we
+        // allowed a caller-provided pid if the uid matched the calling context, but this is
+        // error-prone since it allows a mismatched uid/pid pair.
+        return ValidatedAttributionSourceState::createFromBinderContext(std::move(attr), provider);
+    }
+}
+
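+// Unwraps an error::BinderResult: on failure, logs and returns the converted status_t
+// from the enclosing function; on success, yields the contained value.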
+#define VALUE_OR_RETURN_CONVERTED(exp)                                                \
+    ({                                                                                \
+        auto _tmp = (exp);                                                            \
+        if (!_tmp.ok()) {                                                             \
+            ALOGE("Function: %s Line: %d Failed result (%s)", __FUNCTION__, __LINE__, \
+                  errorToString(_tmp.error()).c_str());                               \
+            return statusTFromBinderStatus(_tmp.error());                             \
+        }                                                                             \
+        std::move(_tmp.value());                                                      \
+    })
+
 // Creates association between Binder code to name for IAudioFlinger.
 #define IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST \
 BINDER_METHOD_ENTRY(createTrack) \
@@ -132,8 +189,7 @@
 BINDER_METHOD_ENTRY(masterMute) \
 BINDER_METHOD_ENTRY(setStreamVolume) \
 BINDER_METHOD_ENTRY(setStreamMute) \
-BINDER_METHOD_ENTRY(streamVolume) \
-BINDER_METHOD_ENTRY(streamMute) \
+BINDER_METHOD_ENTRY(setPortsVolume) \
 BINDER_METHOD_ENTRY(setMode) \
 BINDER_METHOD_ENTRY(setMicMute) \
 BINDER_METHOD_ENTRY(getMicMute) \
@@ -193,6 +249,7 @@
 BINDER_METHOD_ENTRY(getSoundDoseInterface) \
 BINDER_METHOD_ENTRY(getAudioPolicyConfig) \
 BINDER_METHOD_ENTRY(getAudioMixPort) \
+BINDER_METHOD_ENTRY(resetReferencesForTest) \
 
 // singleton for Binder Method Statistics for IAudioFlinger
 static auto& getIAudioFlingerStatistics() {
@@ -466,6 +523,8 @@
             sMediaLogService->unregisterWriter(iMemory);
         }
     }
+    mMediaLogNotifier->requestExit();
+    mPatchCommandThread->exit();
 }
 
 //static
@@ -516,30 +575,42 @@
     audio_attributes_t localAttr = *attr;
 
     // TODO b/182392553: refactor or make clearer
-    pid_t clientPid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
-    bool updatePid = (clientPid == (pid_t)-1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    AttributionSourceState adjAttributionSource;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        pid_t clientPid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+        bool updatePid = (clientPid == (pid_t)-1);
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
 
-    AttributionSourceState adjAttributionSource = client.attributionSource;
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        uid_t clientUid =
-            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
-        ALOGW_IF(clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        updatePid = true;
-    }
-    if (updatePid) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
+        adjAttributionSource = client.attributionSource;
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            uid_t clientUid =
+                VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+            ALOGW_IF(clientUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, clientUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            updatePid = true;
+        }
+        if (updatePid) {
+            const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, clientPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
             adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(client.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+    }
 
     if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
         audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
@@ -549,6 +620,7 @@
         std::vector<audio_io_handle_t> secondaryOutputs;
         bool isSpatialized;
         bool isBitPerfect;
+        float volume;
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
                                             &streamType, adjAttributionSource,
@@ -556,7 +628,8 @@
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
                                             deviceId, &portId, &secondaryOutputs, &isSpatialized,
-                                            &isBitPerfect);
+                                            &isBitPerfect,
+                                            &volume);
         if (ret != NO_ERROR) {
             config->sample_rate = fullConfig.sample_rate;
             config->channel_mask = fullConfig.channel_mask;
@@ -680,20 +753,22 @@
 
     result.append("Notification Clients:\n");
     result.append("   pid    uid  name\n");
-    for (size_t i = 0; i < mNotificationClients.size(); ++i) {
-        const pid_t pid = mNotificationClients[i]->getPid();
-        const uid_t uid = mNotificationClients[i]->getUid();
-        const mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
-        result.appendFormat("%6d %6u  %s\n", pid, uid, info.package.c_str());
+    for (const auto& [ _, client ] : mNotificationClients) {
+        const uid_t uid = client->getUid();
+        const std::shared_ptr<const mediautils::UidInfo::Info> info =
+                mediautils::UidInfo::getInfo(uid);
+        result.appendFormat("%6d %6u  %s\n",
+                client->getPid(), uid, info->package.c_str());
     }
 
     result.append("Global session refs:\n");
     result.append("  session  cnt     pid    uid  name\n");
     for (size_t i = 0; i < mAudioSessionRefs.size(); i++) {
         AudioSessionRef *r = mAudioSessionRefs[i];
-        const mediautils::UidInfo::Info info = mUidInfo.getInfo(r->mUid);
+        const std::shared_ptr<const mediautils::UidInfo::Info> info =
+                mediautils::UidInfo::getInfo(r->mUid);
         result.appendFormat("  %7d %4d %7d %6u  %s\n", r->mSessionid, r->mCnt, r->mPid,
-                r->mUid, info.package.c_str());
+                r->mUid, info->package.c_str());
     }
     write(fd, result.c_str(), result.size());
 }
@@ -826,6 +901,17 @@
 
         BUFLOG_RESET;
 
+        if (media::psh_utils::AudioPowerManager::enabled()) {
+            char value[PROPERTY_VALUE_MAX];
+            property_get("ro.build.display.id", value, "Unknown build");
+            std::string build(value);
+            build.append("\n");
+            write(fd, build.c_str(), build.size());
+            const std::string powerLog =
+                    media::psh_utils::AudioPowerManager::getAudioPowerManager().toString();
+            write(fd, powerLog.c_str(), powerLog.size());
+        }
+
         if (locked) {
             mutex().unlock();
         }
@@ -981,6 +1067,7 @@
 status_t AudioFlinger::createTrack(const media::CreateTrackRequest& _input,
                                    media::CreateTrackResponse& _output)
 {
+    ATRACE_CALL();
     // Local version of VALUE_OR_RETURN, specific to this method's calling conventions.
     CreateTrackInput input = VALUE_OR_RETURN_STATUS(CreateTrackInput::fromAidl(_input));
     CreateTrackOutput output;
@@ -993,37 +1080,52 @@
     std::vector<audio_io_handle_t> secondaryOutputs;
     bool isSpatialized = false;
     bool isBitPerfect = false;
+    float volume;
 
-    // TODO b/182392553: refactor or make clearer
-    pid_t clientPid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(input.clientInfo.attributionSource.pid));
-    bool updatePid = (clientPid == (pid_t)-1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    uid_t clientUid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(input.clientInfo.attributionSource.uid));
     audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
     std::vector<int> effectIds;
     audio_attributes_t localAttr = input.attr;
 
-    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        ALOGW_IF(clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        clientUid = callingUid;
-        updatePid = true;
+    AttributionSourceState adjAttributionSource;
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = input.clientInfo.attributionSource;
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        uid_t clientUid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(
+                        input.clientInfo.attributionSource.uid));
+        pid_t clientPid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+                        input.clientInfo.attributionSource.pid));
+        bool updatePid = (clientPid == (pid_t)-1);
+
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            ALOGW_IF(clientUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, clientUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            clientUid = callingUid;
+            updatePid = true;
+        }
+        if (updatePid) {
+            ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, clientPid);
+            clientPid = callingPid;
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                adjAttributionSource);
+
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(input.clientInfo.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
     }
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    if (updatePid) {
-        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        clientPid = callingPid;
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
-            adjAttributionSource);
 
     audio_session_t sessionId = input.sessionId;
     if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -1039,7 +1141,7 @@
     lStatus = AudioSystem::getOutputForAttr(&localAttr, &output.outputId, sessionId, &streamType,
                                             adjAttributionSource, &input.config, input.flags,
                                             &output.selectedDeviceId, &portId, &secondaryOutputs,
-                                            &isSpatialized, &isBitPerfect);
+                                            &isSpatialized, &isBitPerfect, &volume);
 
     if (lStatus != NO_ERROR || output.outputId == AUDIO_IO_HANDLE_NONE) {
         ALOGE("createTrack() getOutputForAttr() return error %d or invalid output handle", lStatus);
@@ -1076,7 +1178,7 @@
             goto Exit;
         }
 
-        client = registerPid(clientPid);
+        client = registerPid(adjAttributionSource.pid);
 
         IAfPlaybackThread* effectThread = nullptr;
         sp<IAfEffectChain> effectChain = nullptr;
@@ -1096,7 +1198,7 @@
         if (effectThread == nullptr) {
             effectChain = getOrphanEffectChain_l(sessionId);
         }
-        ALOGV("createTrack() sessionId: %d", sessionId);
+        ALOGV("createTrack() sessionId: %d volume: %f", sessionId, volume);
 
         output.sampleRate = input.config.sample_rate;
         output.frameCount = input.frameCount;
@@ -1111,7 +1213,7 @@
                                       input.sharedBuffer, sessionId, &output.flags,
                                       callingPid, adjAttributionSource, input.clientInfo.clientTid,
                                       &lStatus, portId, input.audioTrackCallback, isSpatialized,
-                                      isBitPerfect, &output.afTrackFlags);
+                                      isBitPerfect, &output.afTrackFlags, volume);
         LOG_ALWAYS_FATAL_IF((lStatus == NO_ERROR) && (track == 0));
         // we don't abort yet if lStatus != NO_ERROR; there is still work to be done regardless
 
@@ -1562,6 +1664,33 @@
     return NO_ERROR;
 }
 
+status_t AudioFlinger::setPortsVolume(
+        const std::vector<audio_port_handle_t>& ports, float volume, audio_io_handle_t output)
+{
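+    // Validate inputs (no null port handles, volume within [0.0, 1.0], a real output handle),
+    // then forward to the matching playback thread or MMAP playback thread.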
+    for (const auto& port : ports) {
+        if (port == AUDIO_PORT_HANDLE_NONE) {
+            return BAD_VALUE;
+        }
+    }
+    if (isnan(volume) || volume > 1.0f || volume < 0.0f) {
+        return BAD_VALUE;
+    }
+    if (output == AUDIO_IO_HANDLE_NONE) {
+        return BAD_VALUE;
+    }
+    audio_utils::lock_guard lock(mutex());
+    IAfPlaybackThread *thread = checkPlaybackThread_l(output);
+    if (thread != nullptr) {
+        return thread->setPortsVolume(ports, volume);
+    }
+    const sp<IAfMmapThread> mmapThread = checkMmapThread_l(output);
+    if (mmapThread != nullptr && mmapThread->isOutput()) {
+        IAfMmapPlaybackThread *mmapPlaybackThread = mmapThread->asIAfMmapPlaybackThread().get();
+        return mmapPlaybackThread->setPortsVolume(ports, volume);
+    }
+    return BAD_VALUE;
+}
+
 status_t AudioFlinger::setRequestedLatencyMode(
         audio_io_handle_t output, audio_latency_mode_t mode) {
     if (output == AUDIO_IO_HANDLE_NONE) {
@@ -1664,37 +1793,6 @@
     return NO_ERROR;
 }
 
-float AudioFlinger::streamVolume(audio_stream_type_t stream, audio_io_handle_t output) const
-{
-    status_t status = checkStreamType(stream);
-    if (status != NO_ERROR) {
-        return 0.0f;
-    }
-    if (output == AUDIO_IO_HANDLE_NONE) {
-        return 0.0f;
-    }
-
-    audio_utils::lock_guard lock(mutex());
-    sp<VolumeInterface> volumeInterface = getVolumeInterface_l(output);
-    if (volumeInterface == NULL) {
-        return 0.0f;
-    }
-
-    return volumeInterface->streamVolume(stream);
-}
-
-bool AudioFlinger::streamMute(audio_stream_type_t stream) const
-{
-    status_t status = checkStreamType(stream);
-    if (status != NO_ERROR) {
-        return true;
-    }
-
-    audio_utils::lock_guard lock(mutex());
-    return streamMute_l(stream);
-}
-
-
 void AudioFlinger::broadcastParametersToRecordThreads_l(const String8& keyValuePairs)
 {
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
@@ -1935,10 +2033,11 @@
     if (mPrimaryHardwareDev == nullptr) {
         return 0;
     }
+    if (mInputBufferSizeOrderedDevs.empty()) {
+        return 0;
+    }
     mHardwareStatus = AUDIO_HW_GET_INPUT_BUFFER_SIZE;
 
-    sp<DeviceHalInterface> dev = mPrimaryHardwareDev.load()->hwDevice();
-
     std::vector<audio_channel_mask_t> channelMasks = {channelMask};
     if (channelMask != AUDIO_CHANNEL_IN_MONO) {
         channelMasks.push_back(AUDIO_CHANNEL_IN_MONO);
@@ -1968,6 +2067,22 @@
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
+    auto getInputBufferSize = [](const sp<DeviceHalInterface>& dev, audio_config_t config,
+                                 size_t* bytes) -> status_t {
+        if (!dev) {
+            return BAD_VALUE;
+        }
+        status_t result = dev->getInputBufferSize(&config, bytes);
+        if (result == BAD_VALUE) {
+            // Retry with the config suggested by the HAL.
+            result = dev->getInputBufferSize(&config, bytes);
+        }
+        if (result != OK || *bytes == 0) {
+            return BAD_VALUE;
+        }
+        return result;
+    };
+
     // Change parameters of the configuration each iteration until we find a
     // configuration that the device will support, or HAL suggests what it supports.
     audio_config_t config = AUDIO_CONFIG_INITIALIZER;
@@ -1979,16 +2094,15 @@
                 config.sample_rate = testSampleRate;
 
                 size_t bytes = 0;
-                audio_config_t loopConfig = config;
-                status_t result = dev->getInputBufferSize(&config, &bytes);
-                if (result == BAD_VALUE) {
-                    // Retry with the config suggested by the HAL.
-                    result = dev->getInputBufferSize(&config, &bytes);
+                ret = BAD_VALUE;
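+                // Try each input-capable HAL device in order until one reports a buffer size.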
+                for (const AudioHwDevice* dev : mInputBufferSizeOrderedDevs) {
+                    ret = getInputBufferSize(dev->hwDevice(), config, &bytes);
+                    if (ret == OK) {
+                        break;
+                    }
                 }
-                if (result != OK || bytes == 0) {
-                    config = loopConfig;
-                    continue;
-                }
+                if (ret == BAD_VALUE) continue;
+
                 if (config.sample_rate != sampleRate || config.channel_mask != channelMask ||
                     config.format != format) {
                     uint32_t dstChannelCount = audio_channel_count_from_in_mask(channelMask);
@@ -2064,24 +2178,22 @@
 
 void AudioFlinger::registerClient(const sp<media::IAudioFlingerClient>& client)
 {
-    audio_utils::lock_guard _l(mutex());
     if (client == 0) {
         return;
     }
-    pid_t pid = IPCThreadState::self()->getCallingPid();
+    const pid_t pid = IPCThreadState::self()->getCallingPid();
     const uid_t uid = IPCThreadState::self()->getCallingUid();
+
+    audio_utils::lock_guard _l(mutex());
     {
         audio_utils::lock_guard _cl(clientMutex());
-        if (mNotificationClients.indexOfKey(pid) < 0) {
-            sp<NotificationClient> notificationClient = new NotificationClient(this,
-                                                                                client,
-                                                                                pid,
-                                                                                uid);
+        if (mNotificationClients.count(pid) == 0) {
+            auto notificationClient = sp<NotificationClient>::make(
+                    this, client, pid, uid);
             ALOGV("registerClient() client %p, pid %d, uid %u",
                     notificationClient.get(), pid, uid);
 
-            mNotificationClients.add(pid, notificationClient);
-
+            mNotificationClients[pid] = notificationClient;
             sp<IBinder> binder = IInterface::asBinder(client);
             binder->linkToDeath(notificationClient);
         }
@@ -2108,7 +2220,7 @@
         audio_utils::lock_guard _l(mutex());
         {
             audio_utils::lock_guard _cl(clientMutex());
-            mNotificationClients.removeItem(pid);
+            mNotificationClients.erase(pid);
         }
 
         ALOGV("%d died, releasing its sessions", pid);
@@ -2149,11 +2261,13 @@
             legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
 
     audio_utils::lock_guard _l(clientMutex());
-    size_t size = mNotificationClients.size();
-    for (size_t i = 0; i < size; i++) {
-        if ((pid == 0) || (mNotificationClients.keyAt(i) == pid)) {
-            mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(eventAidl,
-                                                                                   descAidl);
+    if (pid != 0) {
+        if (auto it = mNotificationClients.find(pid); it != mNotificationClients.end()) {
+            it->second->audioFlingerClient()->ioConfigChanged(eventAidl, descAidl);
+        }
+    } else {
+        for (const auto& [ client_pid, client] : mNotificationClients) {
+            client->audioFlingerClient()->ioConfigChanged(eventAidl, descAidl);
         }
     }
 }
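The dispatch above reduces to a lookup-or-broadcast pattern over the new std::map; the Listener type and notifyClients function below are invented for illustration only.

#include <functional>
#include <map>
#include <sys/types.h>

struct Listener {
    std::function<void(int event)> onIoConfigChanged;
};

// pid == 0 means "broadcast"; otherwise notify only the matching client, if registered.
void notifyClients(const std::map<pid_t, Listener>& clients, pid_t pid, int event) {
    if (pid != 0) {
        if (auto it = clients.find(pid); it != clients.end()) {
            it->second.onIoConfigChanged(event);
        }
    } else {
        for (const auto& entry : clients) {
            entry.second.onIoConfigChanged(event);
        }
    }
}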
@@ -2167,12 +2281,24 @@
 
     audio_utils::lock_guard _l(clientMutex());
     size_t size = mNotificationClients.size();
-    for (size_t i = 0; i < size; i++) {
-        mNotificationClients.valueAt(i)->audioFlingerClient()
-                ->onSupportedLatencyModesChanged(outputAidl, modesAidl);
+    for (const auto& [_, client] : mNotificationClients) {
+        client->audioFlingerClient()->onSupportedLatencyModesChanged(outputAidl, modesAidl);
     }
 }
 
+void AudioFlinger::onHardError(std::set<audio_port_handle_t>& trackPortIds) {
+    ALOGI("releasing tracks due to a hard error occurred on an I/O thread");
+    for (const auto portId : trackPortIds) {
+        AudioSystem::releaseOutput(portId);
+    }
+}
+
+const IPermissionProvider& AudioFlinger::getPermissionProvider() {
+    // This is initialized as part of service construction, prior to binder registration,
+    // so it should always be non-null.
+    return mAudioPolicyServiceLocal.load()->getPermissionProvider();
+}
+
 // removeClient_l() must be called with AudioFlinger::clientMutex() held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
@@ -2215,6 +2341,9 @@
                                                      pid_t pid,
                                                      uid_t uid)
     : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid), mAudioFlingerClient(client)
+    , mClientToken(media::psh_utils::AudioPowerManager::enabled()
+            ? media::psh_utils::createAudioClientToken(pid, uid)
+            : nullptr)
 {
 }
 
@@ -2224,7 +2353,7 @@
 
 void AudioFlinger::NotificationClient::binderDied(const wp<IBinder>& who __unused)
 {
-    sp<NotificationClient> keep(this);
+    const auto keep = sp<NotificationClient>::fromExisting(this);
     mAudioFlinger->removeNotificationClient(mPid);
 }
 
@@ -2282,30 +2411,43 @@
     output.buffers.clear();
     output.inputId = AUDIO_IO_HANDLE_NONE;
 
-    // TODO b/182392553: refactor or clean up
-    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    bool updatePid = (adjAttributionSource.pid == -1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
-           adjAttributionSource.uid));
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        ALOGW_IF(currentUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, currentUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        updatePid = true;
+    AttributionSourceState adjAttributionSource;
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = input.clientInfo.attributionSource;
+        bool updatePid = (adjAttributionSource.pid == -1);
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
+               adjAttributionSource.uid));
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            ALOGW_IF(currentUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, currentUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            updatePid = true;
+        }
+        const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+                adjAttributionSource.pid));
+        if (updatePid) {
+            ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, currentPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(
+                    input.clientInfo.attributionSource,
+                    getPermissionProvider()
+                    ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
     }
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
-            adjAttributionSource.pid));
-    if (updatePid) {
-        ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, currentPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
-            adjAttributionSource);
+
     // further format checks are performed by createRecordTrack_l()
     if (!audio_is_valid_format(input.config.format)) {
         ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -2610,12 +2752,43 @@
     }
 
     mAudioHwDevs.add(handle, audioDevice);
+    if (strcmp(name, AUDIO_HARDWARE_MODULE_ID_STUB) != 0) {
+        mInputBufferSizeOrderedDevs.insert(audioDevice);
+    }
 
     ALOGI("loadHwModule() Loaded %s audio interface, handle %d", name, handle);
 
     return audioDevice;
 }
 
+// Sort AudioHwDevices so that they are traversed by getInputBufferSize in the following order:
+// Primary, Usb, Bluetooth, A2DP, other modules, remote submix.
+/* static */
+bool AudioFlinger::inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs) {
+    static const std::map<std::string_view, int> kPriorities = {
+        { AUDIO_HARDWARE_MODULE_ID_PRIMARY, 0 }, { AUDIO_HARDWARE_MODULE_ID_USB, 1 },
+        { AUDIO_HARDWARE_MODULE_ID_BLUETOOTH, 2 }, { AUDIO_HARDWARE_MODULE_ID_A2DP, 3 },
+        { AUDIO_HARDWARE_MODULE_ID_REMOTE_SUBMIX, std::numeric_limits<int>::max() }
+    };
+
+    const std::string_view lhsName = lhs->moduleName();
+    const std::string_view rhsName = rhs->moduleName();
+
+    auto lhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto lhsIt = kPriorities.find(lhsName); lhsIt != kPriorities.end()) {
+        lhsPriority = lhsIt->second;
+    }
+    auto rhsPriority = std::numeric_limits<int>::max() - 1;
+    if (const auto rhsIt = kPriorities.find(rhsName); rhsIt != kPriorities.end()) {
+        rhsPriority = rhsIt->second;
+    }
+
+    if (lhsPriority != rhsPriority) {
+        return lhsPriority < rhsPriority;
+    }
+    return lhsName < rhsName;
+}
+
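A self-contained illustration of how the comparator above orders a std::set; the plain module-name strings stand in for the AUDIO_HARDWARE_MODULE_ID_* macros and are assumptions for the example only.

#include <cassert>
#include <limits>
#include <map>
#include <set>
#include <string>
#include <string_view>

struct HwDev { std::string module; };

static int priorityOf(std::string_view name) {
    static const std::map<std::string_view, int> kPriorities = {
        {"primary", 0}, {"usb", 1}, {"bluetooth", 2}, {"a2dp", 3},
        {"r_submix", std::numeric_limits<int>::max()},
    };
    const auto it = kPriorities.find(name);
    return it != kPriorities.end() ? it->second : std::numeric_limits<int>::max() - 1;
}

static bool devCmp(const HwDev* lhs, const HwDev* rhs) {
    const int lp = priorityOf(lhs->module);
    const int rp = priorityOf(rhs->module);
    return lp != rp ? lp < rp : lhs->module < rhs->module;  // name breaks priority ties
}

int main() {
    HwDev submix{"r_submix"}, usb{"usb"}, primary{"primary"}, other{"msd"};
    std::set<const HwDev*, decltype(&devCmp)> ordered{devCmp};
    for (const HwDev* d : {&submix, &usb, &primary, &other}) ordered.insert(d);
    assert((*ordered.begin())->module == "primary");    // highest priority first
    assert((*ordered.rbegin())->module == "r_submix");  // remote submix always last
    return 0;
}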
 // ----------------------------------------------------------------------------
 
 uint32_t AudioFlinger::getPrimaryOutputSamplingRate() const
@@ -2866,7 +3039,8 @@
                                                         audio_config_base_t *mixerConfig,
                                                         audio_devices_t deviceType,
                                                         const String8& address,
-                                                        audio_output_flags_t flags)
+                                                        audio_output_flags_t *flags,
+                                                        const audio_attributes_t attributes)
 {
     AudioHwDevice *outHwDev = findSuitableHwDev_l(module, deviceType);
     if (outHwDev == NULL) {
@@ -2884,18 +3058,23 @@
 
     mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
     AudioStreamOut *outputStream = NULL;
+
+    playback_track_metadata_v7_t trackMetadata;
+    trackMetadata.base.usage = attributes.usage;
+
     status_t status = outHwDev->openOutputStream(
             &outputStream,
             *output,
             deviceType,
             flags,
             halConfig,
-            address.c_str());
+            address.c_str(),
+            {trackMetadata});
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
     if (status == NO_ERROR) {
-        if (flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
+        if (*flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) {
             const sp<IAfMmapPlaybackThread> thread = IAfMmapPlaybackThread::create(
                     this, *output, outHwDev, outputStream, mSystemReady);
             mMmapThreads.add(*output, thread);
@@ -2904,22 +3083,22 @@
             return thread;
         } else {
             sp<IAfPlaybackThread> thread;
-            if (flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+            if (*flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
                 thread = IAfPlaybackThread::createBitPerfectThread(
                         this, outputStream, *output, mSystemReady);
                 ALOGV("%s() created bit-perfect output: ID %d thread %p",
                       __func__, *output, thread.get());
-            } else if (flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
+            } else if (*flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) {
                 thread = IAfPlaybackThread::createSpatializerThread(this, outputStream, *output,
                                                     mSystemReady, mixerConfig);
                 ALOGV("openOutput_l() created spatializer output: ID %d thread %p",
                       *output, thread.get());
-            } else if (flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
+            } else if (*flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) {
                 thread = IAfPlaybackThread::createOffloadThread(this, outputStream, *output,
                         mSystemReady, halConfig->offload_info);
                 ALOGV("openOutput_l() created offload output: ID %d thread %p",
                       *output, thread.get());
-            } else if ((flags & AUDIO_OUTPUT_FLAG_DIRECT)
+            } else if ((*flags & AUDIO_OUTPUT_FLAG_DIRECT)
                     || !IAfThreadBase::isValidPcmSinkFormat(halConfig->format)
                     || !IAfThreadBase::isValidPcmSinkChannelMask(halConfig->channel_mask)) {
                 thread = IAfPlaybackThread::createDirectOutputThread(this, outputStream, *output,
@@ -2959,6 +3138,8 @@
             aidl2legacy_DeviceDescriptorBase(request.device));
     audio_output_flags_t flags = VALUE_OR_RETURN_STATUS(
             aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
+    audio_attributes_t attributes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioAttributes_audio_attributes_t(request.attributes));
 
     audio_io_handle_t output;
 
@@ -2981,7 +3162,7 @@
     audio_utils::lock_guard _l(mutex());
 
     const sp<IAfThreadBase> thread = openOutput_l(module, &output, &halConfig,
-            &mixerConfig, deviceType, address, flags);
+            &mixerConfig, deviceType, address, &flags, attributes);
     if (thread != 0) {
         uint32_t latencyMs = 0;
         if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
@@ -3439,7 +3620,7 @@
         // is likely proxied by mediaserver (e.g CameraService) and releaseAudioSessionId() can be
         // called from a different pid leaving a stale session reference.  Also we don't know how
         // to clear this reference if the client process dies.
-        if (mNotificationClients.indexOfKey(caller) < 0) {
+        if (mNotificationClients.count(caller) == 0) {
             ALOGW("acquireAudioSessionId() unknown client %d for session %d", caller, audioSession);
             return;
         }
@@ -3700,8 +3881,7 @@
 
 
 // checkPlaybackThread_l() must be called with AudioFlinger::mutex() held
-sp<VolumeInterface> AudioFlinger::getVolumeInterface_l(audio_io_handle_t output) const
-{
+sp<VolumeInterface> AudioFlinger::getVolumeInterface_l(audio_io_handle_t output) const {
     sp<VolumeInterface> volumeInterface = mPlaybackThreads.valueFor(output).get();
     if (volumeInterface == nullptr) {
         IAfMmapThread* const mmapThread = mMmapThreads.valueFor(output).get();
@@ -3755,7 +3935,11 @@
 
 IAfPlaybackThread* AudioFlinger::primaryPlaybackThread_l() const
 {
-    audio_utils::lock_guard lock(hardwareMutex());
+    // The atomic ptr mPrimaryHardwareDev requires both the
+    // AudioFlinger and the Hardware mutex for modification.
+    // As we hold the AudioFlinger mutex, we access it
+    // safely without the Hardware mutex, to avoid mutex order
+    // inversion with Thread methods and the ThreadBase mutex.
     if (mPrimaryHardwareDev == nullptr) {
         return nullptr;
     }
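A rough sketch of the locking rule described in the comment above, with invented DeviceRegistry/HwDevice names: the pointer is only written while both mutexes are held, so a reader holding either one observes a stable value.

#include <atomic>
#include <mutex>

struct HwDevice { int id; };

class DeviceRegistry {
public:
    // Writers must hold BOTH mutexes, so a reader holding either one sees a stable value.
    void setPrimary(HwDevice* dev) {
        std::scoped_lock lock(mMainMutex, mHardwareMutex);
        mPrimary.store(dev, std::memory_order_release);
    }

    // Caller is assumed to already hold mMainMutex (mirrors primaryPlaybackThread_l above).
    HwDevice* primaryLockedByMain() const {
        return mPrimary.load(std::memory_order_acquire);
    }

    std::mutex mMainMutex;
    std::mutex mHardwareMutex;

private:
    std::atomic<HwDevice*> mPrimary{nullptr};
};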
@@ -3879,8 +4063,12 @@
         // TODO: We could check compatibility of the secondaryThread with the PatchTrack
         // for fast usage: thread has fast mixer, sample rate matches, etc.;
         // for now, we exclude fast tracks by removing the Fast flag.
+        constexpr audio_output_flags_t kIncompatiblePatchTrackFlags =
+                static_cast<audio_output_flags_t>(AUDIO_OUTPUT_FLAG_FAST
+                        | AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+
         const audio_output_flags_t outputFlags =
-                (audio_output_flags_t)(track->getOutputFlags() & ~AUDIO_OUTPUT_FLAG_FAST);
+                (audio_output_flags_t)(track->getOutputFlags() & ~kIncompatiblePatchTrackFlags);
         sp<IAfPatchTrack> patchTrack = IAfPatchTrack::create(secondaryThread,
                                                        track->streamType(),
                                                        track->sampleRate(),
@@ -3891,7 +4079,9 @@
                                                        patchRecord->bufferSize(),
                                                        outputFlags,
                                                        0ns /* timeout */,
-                                                       frameCountToBeReady);
+                                                       frameCountToBeReady,
+                                                       track->getSpeed(),
+                                                       1.f /* volume */);
         status = patchTrack->initCheck();
         if (status != NO_ERROR) {
             ALOGE("Secondary output patchTrack init failed: %d", status);
@@ -4059,20 +4249,31 @@
     int idOut = -1;
 
     status_t lStatus = NO_ERROR;
-
-    // TODO b/182392553: refactor or make clearer
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-    pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
-    if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(currentPid != -1 && currentPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, currentPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-        currentPid = callingPid;
+    uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    pid_t currentPid;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+        currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
+        if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW_IF(currentPid != -1 && currentPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, currentPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+            currentPid = callingPid;
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(request.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+        currentPid = adjAttributionSource.pid;
     }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+
 
     ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
           adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
@@ -4429,7 +4630,7 @@
             // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
             // for the HapticGenerator.
             const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
-                    std::move(getDefaultVibratorInfo_l());
+                    getDefaultVibratorInfo_l();
             if (defaultVibratorInfo) {
                 // Only set the vibrator info when it is a valid one.
                 audio_utils::lock_guard _cl(chain->mutex());
@@ -4509,6 +4710,10 @@
             if ((pt->type() == IAfThreadBase::MIXER || pt->type() == IAfThreadBase::OFFLOAD) &&
                     ((sessionType & IAfThreadBase::EFFECT_SESSION) != 0)) {
                 srcThread = pt.get();
+                if (srcThread == dstThread) {
+                    ALOGD("%s() same dst and src threads, ignoring move", __func__);
+                    return NO_ERROR;
+                }
                 ALOGW("%s() found srcOutput %d hosting AUDIO_SESSION_OUTPUT_MIX", __func__,
                       pt->id());
                 break;
@@ -4915,6 +5120,30 @@
     return mPatchPanel->getAudioMixPort_l(devicePort, mixPort);
 }
 
+status_t AudioFlinger::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
+    audio_utils::lock_guard _l(mutex());
+    ALOGV("%s", __func__);
+
+    std::map<audio_port_handle_t, bool> tracksInternalMuteMap;
+    for (const auto& trackInternalMute : tracksInternalMute) {
+        audio_port_handle_t portId = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_int32_t_audio_port_handle_t(trackInternalMute.portId));
+        tracksInternalMuteMap.emplace(portId, trackInternalMute.muted);
+    }
+    for (size_t i = 0; i < mPlaybackThreads.size() && !tracksInternalMuteMap.empty(); i++) {
+        mPlaybackThreads.valueAt(i)->setTracksInternalMute(&tracksInternalMuteMap);
+    }
+    return NO_ERROR;
+}
+
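The new setTracksInternalMute fan-out can be pictured as below, with invented PlaybackThread and MuteInfo types: build a portId-to-muted map, then let each thread consume the entries it owns until the map is empty.

#include <cstddef>
#include <map>
#include <vector>

using PortHandle = int;

struct MuteInfo { PortHandle portId; bool muted; };

struct PlaybackThread {
    std::vector<PortHandle> ownedPorts;
    // Consume (erase) the entries for tracks hosted by this thread.
    void setTracksInternalMute(std::map<PortHandle, bool>* mutes) {
        for (PortHandle port : ownedPorts) {
            if (auto it = mutes->find(port); it != mutes->end()) {
                // ... apply it->second to the corresponding track ...
                mutes->erase(it);
            }
        }
    }
};

void setTracksInternalMute(std::vector<PlaybackThread>& threads,
                           const std::vector<MuteInfo>& request) {
    std::map<PortHandle, bool> muteMap;
    for (const MuteInfo& info : request) muteMap.emplace(info.portId, info.muted);
    // Stop early once every requested port has found its owning thread.
    for (size_t i = 0; i < threads.size() && !muteMap.empty(); ++i) {
        threads[i].setTracksInternalMute(&muteMap);
    }
}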
+status_t AudioFlinger::resetReferencesForTest() {
+    mDeviceEffectManager.clear();
+    mPatchPanel.clear();
+    mMelReporter->resetReferencesForTest();
+    return NO_ERROR;
+}
+
 // ----------------------------------------------------------------------------
 
 status_t AudioFlinger::onTransactWrapper(TransactionCode code,
@@ -4949,6 +5178,9 @@
         case TransactionCode::INVALIDATE_TRACKS:
         case TransactionCode::GET_AUDIO_POLICY_CONFIG:
         case TransactionCode::GET_AUDIO_MIX_PORT:
+        case TransactionCode::SET_TRACKS_INTERNAL_MUTE:
+        case TransactionCode::RESET_REFERENCES_FOR_TEST:
+        case TransactionCode::SET_PORTS_VOLUME:
             ALOGW("%s: transaction %d received from PID %d",
                   __func__, static_cast<int>(code), IPCThreadState::self()->getCallingPid());
             // return status only for non void methods
@@ -5034,9 +5266,9 @@
         } else {
             getIAudioFlingerStatistics().event(code, elapsedMs);
         }
-    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
-    mediautils::TimeCheck::kDefaultSecondChanceDuration,
-    true /* crashOnTimeout */);
+    }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
+    mediautils::TimeCheck::getDefaultSecondChanceDuration(),
+    !property_get_bool("audio.timecheck.disabled", false) /* crashOnTimeout */);
 
     return delegate();
 }
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 4e46bea..50fd48c 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -33,10 +33,12 @@
 #include <audio_utils/FdToString.h>
 #include <audio_utils/SimpleLog.h>
 #include <media/IAudioFlinger.h>
+#include <media/IAudioPolicyServiceLocal.h>
 #include <media/MediaMetricsItem.h>
 #include <media/audiohal/DevicesFactoryHalInterface.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/Synchronization.h>
+#include <psh_utils/AudioPowerManager.h>
 
 // not needed with the includes above, added to prevent transitive include dependency.
 #include <utils/KeyedVector.h>
@@ -61,6 +63,8 @@
 public:
     static void instantiate() ANDROID_API;
 
+    status_t resetReferencesForTest();
+
 private:
 
     // ---- begin IAudioFlinger interface
@@ -93,9 +97,8 @@
     status_t setStreamMute(audio_stream_type_t stream, bool muted) final
             EXCLUDES_AudioFlinger_Mutex;
 
-    float streamVolume(audio_stream_type_t stream,
-            audio_io_handle_t output) const final EXCLUDES_AudioFlinger_Mutex;
-    bool streamMute(audio_stream_type_t stream) const final EXCLUDES_AudioFlinger_Mutex;
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume,
+            audio_io_handle_t output) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t setMode(audio_mode_t mode) final EXCLUDES_AudioFlinger_Mutex;
 
@@ -259,6 +262,10 @@
     status_t getAudioMixPort(const struct audio_port_v7* devicePort,
                              struct audio_port_v7* mixPort) const final EXCLUDES_AudioFlinger_Mutex;
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) final
+            EXCLUDES_AudioFlinger_Mutex;
+
     status_t onTransactWrapper(TransactionCode code, const Parcel& data, uint32_t flags,
             const std::function<status_t()>& delegate) final EXCLUDES_AudioFlinger_Mutex;
 
@@ -330,9 +337,12 @@
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) final REQUIRES(mutex());
+            audio_output_flags_t* flags,
+            audio_attributes_t attributes) final REQUIRES(mutex());
     const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
-            getAudioHwDevs_l() const final REQUIRES(mutex()) { return mAudioHwDevs; }
+            getAudioHwDevs_l() const final REQUIRES(mutex(), hardwareMutex()) {
+              return mAudioHwDevs;
+            }
     void updateDownStreamPatches_l(const struct audio_patch* patch,
             const std::set<audio_io_handle_t>& streams) final REQUIRES(mutex());
     void updateOutDevicesForRecordThreads_l(const DeviceDescriptorBaseVector& devices) final
@@ -397,6 +407,10 @@
     void onSupportedLatencyModesChanged(
             audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes) final
             EXCLUDES_AudioFlinger_ClientMutex;
+    void onHardError(std::set<audio_port_handle_t>& trackPortIds) final
+            EXCLUDES_AudioFlinger_ClientMutex;
+
+    const ::com::android::media::permission::IPermissionProvider& getPermissionProvider() final;
 
     // ---- end of IAfThreadCallback interface
 
@@ -420,6 +434,13 @@
                             const sp<MmapStreamCallback>& callback,
                             sp<MmapStreamInterface>& interface,
             audio_port_handle_t *handle) EXCLUDES_AudioFlinger_Mutex;
+
+    void initAudioPolicyLocal(sp<media::IAudioPolicyServiceLocal> audioPolicyLocal) {
+        if (mAudioPolicyServiceLocal.load() == nullptr) {
+            mAudioPolicyServiceLocal = std::move(audioPolicyLocal);
+        }
+    }
+
 private:
     // FIXME The 400 is temporarily too high until a leak of writers in media.log is fixed.
     static const size_t kLogMemorySize = 400 * 1024;
@@ -479,6 +500,7 @@
         const pid_t             mPid;
         const uid_t             mUid;
         const sp<media::IAudioFlingerClient> mAudioFlingerClient;
+        const std::unique_ptr<media::psh_utils::Token> mClientToken;
     };
 
     // --- MediaLogNotifier ---
@@ -535,6 +557,7 @@
     IAfPlaybackThread* checkMixerThread_l(audio_io_handle_t output) const REQUIRES(mutex());
 
     sp<VolumeInterface> getVolumeInterface_l(audio_io_handle_t output) const REQUIRES(mutex());
+
     std::vector<sp<VolumeInterface>> getAllVolumeInterfaces_l() const REQUIRES(mutex());
 
 
@@ -630,6 +653,10 @@
     DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*> mAudioHwDevs
             GUARDED_BY(hardwareMutex()) {nullptr /* defValue */};
 
+    static bool inputBufferSizeDevsCmp(const AudioHwDevice* lhs, const AudioHwDevice* rhs);
+    std::set<AudioHwDevice*, decltype(&inputBufferSizeDevsCmp)>
+            mInputBufferSizeOrderedDevs GUARDED_BY(hardwareMutex()) {inputBufferSizeDevsCmp};
+
      const sp<DevicesFactoryHalInterface> mDevicesFactoryHal =
              DevicesFactoryHalInterface::create();
      /* const */ sp<DevicesFactoryHalCallback> mDevicesFactoryHalCallback;  // set onFirstRef().
@@ -673,8 +700,7 @@
 
     DefaultKeyedVector<audio_io_handle_t, sp<IAfRecordThread>> mRecordThreads GUARDED_BY(mutex());
 
-    DefaultKeyedVector<pid_t, sp<NotificationClient>> mNotificationClients
-            GUARDED_BY(clientMutex());
+    std::map<pid_t, sp<NotificationClient>> mNotificationClients GUARDED_BY(clientMutex());
 
                 // updated by atomic_fetch_add_explicit
     volatile atomic_uint_fast32_t mNextUniqueIds[AUDIO_UNIQUE_ID_USE_MAX];  // ctor init
@@ -752,8 +778,6 @@
     bool mSystemReady GUARDED_BY(mutex()) = false;
     std::atomic<bool> mAudioPolicyReady = false;
 
-    mediautils::UidInfo mUidInfo GUARDED_BY(mutex());
-
     // no mutex needed.
     SimpleLog  mRejectedSetParameterLog;
     SimpleLog  mAppSetParameterLog;
@@ -774,6 +798,9 @@
 
     // Bluetooth Variable latency control logic is enabled or disabled
     std::atomic<bool> mBluetoothLatencyModesEnabled = true;
+
+    // Local interface to AudioPolicyService; initialized late, but logically const
+    mediautils::atomic_sp<media::IAudioPolicyServiceLocal> mAudioPolicyServiceLocal;
 };
 
 // ----------------------------------------------------------------------------
diff --git a/services/audioflinger/DeviceEffectManager.cpp b/services/audioflinger/DeviceEffectManager.cpp
index feae97e..7cb9329 100644
--- a/services/audioflinger/DeviceEffectManager.cpp
+++ b/services/audioflinger/DeviceEffectManager.cpp
@@ -71,10 +71,15 @@
 
 void DeviceEffectManager::onReleaseAudioPatch(audio_patch_handle_t handle) {
     ALOGV("%s", __func__);
+    // Keep references on the disconnected handles so that they are destroyed
+    // only after the lock has been released.
+    std::vector<sp<IAfEffectHandle>> disconnectedHandles{};
     audio_utils::lock_guard _l(mutex());
     for (auto& effectProxies : mDeviceEffects) {
         for (auto& effect : effectProxies.second) {
-            effect->onReleasePatch(handle);
+            sp<IAfEffectHandle> disconnectedHandle = effect->onReleasePatch(handle);
+            if (disconnectedHandle != nullptr) {
+                disconnectedHandles.push_back(std::move(disconnectedHandle));
+            }
         }
     }
 }
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index ad043c8..84505d3 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -39,6 +39,7 @@
 #include <mediautils/MethodStatistics.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/TimeCheck.h>
+#include <system/audio_effects/audio_effects_utils.h>
 #include <system/audio_effects/effect_aec.h>
 #include <system/audio_effects/effect_downmix.h>
 #include <system/audio_effects/effect_dynamicsprocessing.h>
@@ -70,6 +71,7 @@
 namespace android {
 
 using aidl_utils::statusTFromBinderStatus;
+using android::effect::utils::EffectParamWriter;
 using audioflinger::EffectConfiguration;
 using binder::Status;
 
@@ -617,10 +619,11 @@
 
 }
 
+// return true if any effect started or stopped
 bool EffectModule::updateState_l() {
     audio_utils::lock_guard _l(mutex());
 
-    bool started = false;
+    bool startedOrStopped = false;
     switch (mState) {
     case RESTART:
         reset_l();
@@ -635,7 +638,7 @@
         }
         if (start_ll() == NO_ERROR) {
             mState = ACTIVE;
-            started = true;
+            startedOrStopped = true;
         } else {
             mState = IDLE;
         }
@@ -655,6 +658,7 @@
         // turn off sequence.
         if (--mDisableWaitCnt == 0) {
             reset_l();
+            startedOrStopped = true;
             mState = IDLE;
         }
         break;
@@ -669,7 +673,7 @@
         break;
     }
 
-    return started;
+    return startedOrStopped;
 }
 
 void EffectModule::process()
@@ -1044,8 +1048,21 @@
             return;
         }
 
-        (void)getCallback()->addEffectToHal(mEffectInterface);
-        mCurrentHalStream = getCallback()->io();
+        status_t status = getCallback()->addEffectToHal(mEffectInterface);
+        if (status == NO_ERROR) {
+            mCurrentHalStream = getCallback()->io();
+        }
+    }
+}
+
+void HwAccDeviceEffectModule::addEffectToHal_l()
+{
+    if (mAddedToHal) {
+        return;
+    }
+    status_t status = getCallback()->addEffectToHal(mEffectInterface);
+    if (status == NO_ERROR) {
+        mAddedToHal = true;
     }
 }
 
@@ -1151,6 +1168,16 @@
     return NO_ERROR;
 }
 
+status_t HwAccDeviceEffectModule::removeEffectFromHal_l()
+{
+    if (!mAddedToHal) {
+        return NO_ERROR;
+    }
+    getCallback()->removeEffectFromHal(mEffectInterface);
+    mAddedToHal = false;
+    return NO_ERROR;
+}
+
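The mAddedToHal guard introduced for HwAccDeviceEffectModule is the idempotent add/remove pattern sketched here with a hypothetical HalEffect interface.

struct HalEffect {
    bool attached = false;
    int add() { attached = true; return 0; }      // stand-in for addEffectToHal()
    int remove() { attached = false; return 0; }  // stand-in for removeEffectFromHal()
};

class HwAccEffect {
public:
    explicit HwAccEffect(HalEffect* hal) : mHal(hal) {}

    void addToHal() {
        if (mAddedToHal) return;              // already attached: nothing to do
        if (mHal->add() == 0) mAddedToHal = true;
    }

    int removeFromHal() {
        if (!mAddedToHal) return 0;           // never attached: nothing to undo
        mHal->remove();
        mAddedToHal = false;
        return 0;
    }

private:
    HalEffect* mHal;
    bool mAddedToHal = false;                 // mirrors the new mAddedToHal member above
};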
 // round up delta valid if value and divisor are positive.
 template <typename T>
 static T roundUpDelta(const T &value, const T &divisor) {
@@ -1358,26 +1385,28 @@
     }
 }
 
-status_t EffectModule::setVolume(uint32_t* left, uint32_t* right, bool controller, bool force) {
+status_t EffectModule::setVolume_l(uint32_t* left, uint32_t* right, bool controller, bool force) {
     AutoLockReentrant _l(mutex(), mSetVolumeReentrantTid);
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
     status_t status = NO_ERROR;
     // Send volume indication if EFFECT_FLAG_VOLUME_IND is set and read back altered volume
-    // if controller flag is set (Note that controller == TRUE => EFFECT_FLAG_VOLUME_CTRL set)
-    if ((isProcessEnabled() || force) &&
+    // if the controller flag is set (note that controller == TRUE means this effect is the
+    // volume controller of the effect chain)
+    if (((isOffloadedOrDirect_l() ? isEnabled() : isProcessEnabled()) || force) &&
             ((mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_IND ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_MONITOR)) {
-        status = setVolumeInternal(left, right, controller);
+        status = setVolumeInternal_ll(left, right, controller);
     }
     return status;
 }
 
-status_t EffectModule::setVolumeInternal(
+status_t EffectModule::setVolumeInternal_ll(
         uint32_t *left, uint32_t *right, bool controller) {
-    if (mVolume.has_value() && *left == mVolume.value()[0] && *right == mVolume.value()[1]) {
+    if (mVolume.has_value() && *left == mVolume.value()[0] && *right == mVolume.value()[1] &&
+            !controller) {
         LOG_ALWAYS_FATAL_IF(
                 !mReturnedVolume.has_value(),
                 "The cached returned volume must not be null when the cached volume has value");
@@ -1387,14 +1416,14 @@
     }
     LOG_ALWAYS_FATAL_IF(mEffectInterface == nullptr, "%s", mEffectInterfaceDebug.c_str());
     uint32_t volume[2] = {*left, *right};
-    uint32_t *pVolume = controller ? volume : nullptr;
+    uint32_t* pVolume = isVolumeControl() ? volume : nullptr;
     uint32_t size = sizeof(volume);
     status_t status = mEffectInterface->command(EFFECT_CMD_SET_VOLUME,
                                                 size,
                                                 volume,
                                                 &size,
                                                 pVolume);
-    if (controller && status == NO_ERROR && size == sizeof(volume)) {
+    if (pVolume && status == NO_ERROR && size == sizeof(volume)) {
         mVolume = {*left, *right}; // Cache the value that has been set
         *left = volume[0];
         *right = volume[1];
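The caching behavior of setVolumeInternal_ll, reduced to a stand-alone sketch (single class, invented sendToHal helper): skip the HAL round trip when the requested volume matches the last one set and the caller is not the chain's volume controller.

#include <array>
#include <cstdint>
#include <optional>

class VolumeCache {
public:
    // Returns the (possibly altered) volume to apply downstream.
    std::array<uint32_t, 2> setVolume(uint32_t left, uint32_t right, bool controller) {
        if (mSetVolume && (*mSetVolume)[0] == left && (*mSetVolume)[1] == right && !controller) {
            return *mReturnedVolume;  // unchanged and not the controller: reuse the last answer
        }
        const std::array<uint32_t, 2> returned = sendToHal(left, right, controller);
        mSetVolume = std::array<uint32_t, 2>{{left, right}};
        mReturnedVolume = returned;
        return returned;
    }

private:
    // Hypothetical HAL call; a volume controller effect may report back an altered volume.
    static std::array<uint32_t, 2> sendToHal(uint32_t left, uint32_t right, bool controller) {
        return controller ? std::array<uint32_t, 2>{{left / 2, right / 2}}
                          : std::array<uint32_t, 2>{{left, right}};
    }

    std::optional<std::array<uint32_t, 2>> mSetVolume;       // last volume pushed to the HAL
    std::optional<std::array<uint32_t, 2>> mReturnedVolume;  // last volume the HAL reported back
};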
@@ -1558,16 +1587,27 @@
         return INVALID_OPERATION;
     }
 
-    std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t) + sizeof(float));
-    effect_param_t *param = (effect_param_t*) request.data();
-    param->psize = sizeof(int32_t);
-    param->vsize = sizeof(int32_t) * 2 + sizeof(float);
-    *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
-    int32_t* hapticScalePtr = reinterpret_cast<int32_t*>(param->data + sizeof(int32_t));
-    hapticScalePtr[0] = id;
-    hapticScalePtr[1] = static_cast<int32_t>(hapticScale.getLevel());
-    float* adaptiveScaleFactorPtr = reinterpret_cast<float*>(param->data + 3 * sizeof(int32_t));
-    *adaptiveScaleFactorPtr = hapticScale.getAdaptiveScaleFactor();
+    // Scale param fields
+    int32_t intensityParam = static_cast<int32_t>(HG_PARAM_HAPTIC_INTENSITY);
+    int32_t scaleLevel = static_cast<int32_t>(hapticScale.getLevel());
+    float scaleFactor = hapticScale.getScaleFactor();
+    float adaptiveScaleFactor = hapticScale.getAdaptiveScaleFactor();
+
+    size_t psize = sizeof(int32_t); // HG_PARAM_HAPTIC_INTENSITY
+    size_t vsize = 2 * sizeof(int32_t) + 2 * sizeof(float); // id + scale fields
+    std::vector<uint8_t> request(sizeof(effect_param_t) + psize + vsize);
+    effect_param_t *effectParam = (effect_param_t*) request.data();
+    effectParam->psize = psize;
+    effectParam->vsize = vsize;
+
+    EffectParamWriter writer(*effectParam);
+    writer.writeToParameter(&intensityParam);
+    writer.writeToValue(&id);
+    writer.writeToValue(&scaleLevel);
+    writer.writeToValue(&scaleFactor);
+    writer.writeToValue(&adaptiveScaleFactor);
+    writer.finishValueWrite();
+
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
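For reference, the EffectParamWriter calls above serialize a parameter key followed by its values; the sketch below packs the same kind of haptic-scale request into a byte buffer using an invented ParamHeader instead of the real effect_param_t.

#include <cstdint>
#include <vector>

// Invented header; the real code uses effect_param_t together with EffectParamWriter.
struct ParamHeader {
    uint32_t psize;  // size of the parameter (key) area in bytes
    uint32_t vsize;  // size of the value area in bytes
};

template <typename T>
static void append(std::vector<uint8_t>& buf, const T& v) {
    const uint8_t* p = reinterpret_cast<const uint8_t*>(&v);
    buf.insert(buf.end(), p, p + sizeof(T));
}

// Builds "key = haptic intensity, values = {id, level, scaleFactor, adaptiveScaleFactor}".
std::vector<uint8_t> buildHapticScaleRequest(int32_t key, int32_t id, int32_t level,
                                             float scaleFactor, float adaptiveScaleFactor) {
    std::vector<uint8_t> buf;
    const ParamHeader header{sizeof(int32_t), 2 * sizeof(int32_t) + 2 * sizeof(float)};
    append(buf, header);              // psize/vsize header
    append(buf, key);                 // parameter area (psize bytes)
    append(buf, id);                  // value area starts here (vsize bytes)
    append(buf, level);
    append(buf, scaleFactor);
    append(buf, adaptiveScaleFactor);
    return buf;
}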
@@ -1586,17 +1626,21 @@
         return INVALID_OPERATION;
     }
 
-    const size_t paramCount = 3;
-    std::vector<uint8_t> request(
-            sizeof(effect_param_t) + sizeof(int32_t) + paramCount * sizeof(float));
-    effect_param_t *param = (effect_param_t*) request.data();
-    param->psize = sizeof(int32_t);
-    param->vsize = paramCount * sizeof(float);
-    *(int32_t*)param->data = HG_PARAM_VIBRATOR_INFO;
-    float* vibratorInfoPtr = reinterpret_cast<float*>(param->data + sizeof(int32_t));
-    vibratorInfoPtr[0] = vibratorInfo.resonantFrequency;
-    vibratorInfoPtr[1] = vibratorInfo.qFactor;
-    vibratorInfoPtr[2] = vibratorInfo.maxAmplitude;
+    size_t psize = sizeof(int32_t); // HG_PARAM_VIBRATOR_INFO
+    size_t vsize = 3 * sizeof(float); // resonantFrequency + qFactor + maxAmplitude
+    std::vector<uint8_t> request(sizeof(effect_param_t) + psize + vsize);
+    effect_param_t *effectParam = (effect_param_t*) request.data();
+    effectParam->psize = psize;
+    effectParam->vsize = vsize;
+
+    int32_t infoParam = static_cast<int32_t>(HG_PARAM_VIBRATOR_INFO);
+    EffectParamWriter writer(*effectParam);
+    writer.writeToParameter(&infoParam);
+    writer.writeToValue(&vibratorInfo.resonantFrequency);
+    writer.writeToValue(&vibratorInfo.qFactor);
+    writer.writeToValue(&vibratorInfo.maxAmplitude);
+    writer.finishValueWrite();
+
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1733,6 +1777,9 @@
         const sp<media::IEffectClient>& effectClient,
         int32_t priority, bool notifyFramesProcessed)
 {
+    if (client == nullptr && effectClient == nullptr) {
+        return sp<InternalEffectHandle>::make(effect, notifyFramesProcessed);
+    }
     return sp<EffectHandle>::make(
             effect, client, effectClient, priority, notifyFramesProcessed);
 }
@@ -1740,12 +1787,14 @@
 EffectHandle::EffectHandle(const sp<IAfEffectBase>& effect,
                                          const sp<Client>& client,
                                          const sp<media::IEffectClient>& effectClient,
-                                         int32_t priority, bool notifyFramesProcessed)
-    : BnEffect(),
+                                         int32_t priority, bool notifyFramesProcessed,
+                                         bool isInternal,
+                                         audio_utils::MutexOrder mutexOrder)
+    : BnEffect(), mMutex(mutexOrder),
     mEffect(effect), mEffectClient(media::EffectClientAsyncProxy::makeIfNeeded(effectClient)),
     mClient(client), mCblk(nullptr),
     mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false),
-    mNotifyFramesProcessed(notifyFramesProcessed)
+    mNotifyFramesProcessed(notifyFramesProcessed), mIsInternal(isInternal)
 {
     ALOGV("constructor %p client %p", this, client.get());
     setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
@@ -1912,7 +1961,7 @@
 
 void EffectHandle::disconnect(bool unpinIfLast)
 {
-    audio_utils::lock_guard _l(mutex());
+    audio_utils::unique_lock _l(mutex());
     ALOGV("disconnect(%s) %p", unpinIfLast ? "true" : "false", this);
     if (mDisconnected) {
         if (unpinIfLast) {
@@ -1924,11 +1973,19 @@
     {
         sp<IAfEffectBase> effect = mEffect.promote();
         if (effect != 0) {
+            // Unlock e.g. for device effect: may need to acquire AudioFlinger lock
+            // Also Internal Effect Handle would require Proxy lock (and vice versa).
+            if (isInternal()) {
+                _l.unlock();
+            }
             if (effect->disconnectHandle(this, unpinIfLast) > 0) {
                 ALOGW("%s Effect handle %p disconnected after thread destruction",
                     __func__, this);
             }
             effect->updatePolicyState();
+            if (isInternal()) {
+                _l.lock();
+            }
         }
     }
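The unlock()/lock() added to EffectHandle::disconnect() is the usual way to break a lock-order inversion; a generic sketch with std::unique_lock and invented mutex names:

#include <mutex>

std::mutex gHandleMutex;   // normally acquired first by external callers
std::mutex gManagerMutex;  // acquired inside the call below

void notifyManager() {
    std::lock_guard<std::mutex> lock(gManagerMutex);
    // ... update shared state ...
}

void disconnect(bool isInternal) {
    std::unique_lock<std::mutex> lock(gHandleMutex);
    // ... bookkeeping under the handle lock ...
    if (isInternal) {
        // The internal path re-enters code that takes gManagerMutex; drop our lock first
        // so the two mutexes are never requested in opposite orders on different threads.
        lock.unlock();
    }
    notifyManager();
    if (isInternal) {
        lock.lock();
    }
    // ... finish up under the handle lock ...
}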
 
@@ -2308,6 +2365,9 @@
     }
     bool doResetVolume = false;
     for (size_t i = 0; i < size; i++) {
+        // Reset the volume when any effect has just started or stopped:
+        // resetVolume_l checks whether the volume controller effect in the chain needs an
+        // update and applies the correct volume.
         doResetVolume = mEffects[i]->updateState_l() || doResetVolume;
     }
     if (doResetVolume) {
@@ -2425,7 +2485,7 @@
             // volume will be set from setVolume_l.
             uint32_t left = 0;
             uint32_t right = 0;
-            effect->setVolume(&left, &right, true /*controller*/, true /*force*/);
+            effect->setVolume_l(&left, &right, true /*controller*/, true /*force*/);
         }
     }
 
@@ -2623,6 +2683,7 @@
 
     // first update volume controller
     const auto volumeControlIndex = findVolumeControl_l(0, size);
+    // index of the effect chain volume controller
     const int ctrlIdx = volumeControlIndex.value_or(-1);
     const sp<IAfEffectModule> volumeControlEffect =
             volumeControlIndex.has_value() ? mEffects[ctrlIdx] : nullptr;
@@ -2639,12 +2700,15 @@
     mVolumeControlEffect = volumeControlEffect;
 
     for (int i = 0; i < ctrlIdx; ++i) {
-        // For all volume control effects before the effect that controls volume, set the volume
+        // Effects placed before the volume controller do not control the effect chain volume;
+        // if such an effect has the volume control capability, set its volume
         // to maximum to avoid double attenuation.
         if (mEffects[i]->isVolumeControl()) {
             uint32_t leftMax = 1 << 24;
             uint32_t rightMax = 1 << 24;
-            mEffects[i]->setVolume(&leftMax, &rightMax, true /*controller*/, true /*force*/);
+            mEffects[i]->setVolume_l(&leftMax, &rightMax,
+                                     false /* not an effect chain volume controller */,
+                                     true /* force */);
         }
     }
 
@@ -2653,9 +2717,13 @@
 
     // second get volume update from volume controller
     if (ctrlIdx >= 0) {
-        mEffects[ctrlIdx]->setVolume(&newLeft, &newRight, true);
+        mEffects[ctrlIdx]->setVolume_l(&newLeft, &newRight,
+                                       true /* effect chain volume controller */);
         mNewLeftVolume = newLeft;
         mNewRightVolume = newRight;
+        ALOGD("%s sessionId %d volume controller effect %s set (%d, %d), ret (%d, %d)", __func__,
+              mSessionId, mEffects[ctrlIdx]->desc().name, mLeftVolume, mRightVolume, newLeft,
+              newRight);
     }
     // then indicate volume to all other effects in chain.
     // Pass altered volume to effects before volume controller
@@ -2674,9 +2742,11 @@
         }
         // Pass requested volume directly if this is volume monitor module
         if (mEffects[i]->isVolumeMonitor()) {
-            mEffects[i]->setVolume(left, right, false);
+            mEffects[i]->setVolume_l(left, right,
+                                     false /* not an effect chain volume controller */);
         } else {
-            mEffects[i]->setVolume(&lVol, &rVol, false);
+            mEffects[i]->setVolume_l(&lVol, &rVol,
+                                     false /* not an effect chain volume controller */);
         }
     }
     *left = newLeft;
@@ -3196,7 +3266,9 @@
     }
 
     if (mThreadType == IAfThreadBase::SPATIALIZER) {
-        if (c->sessionId() == AUDIO_SESSION_OUTPUT_STAGE) {
+        if (c->sessionId() == AUDIO_SESSION_OUTPUT_MIX) {
+            return t->mixerChannelMask();
+        } else if (c->sessionId() == AUDIO_SESSION_OUTPUT_STAGE) {
             if (c->isFirstEffect_l(id)) {
                 return t->mixerChannelMask();
             } else {
@@ -3243,7 +3315,8 @@
                 return t->channelMask();
             }
         } else {
-            return t->channelMask();
+            return (c->sessionId() == AUDIO_SESSION_OUTPUT_MIX) ? t->mixerChannelMask()
+                                                                : t->channelMask();
         }
     } else {
         return t->channelMask();
@@ -3494,19 +3567,17 @@
             ALOGV("%s reusing HAL effect", __func__);
         } else {
             mDevicePort = *port;
-            mHalEffect = new EffectModule(mMyCallback,
-                                      const_cast<effect_descriptor_t *>(&mDescriptor),
-                                      mMyCallback->newEffectId(), AUDIO_SESSION_DEVICE,
-                                      false /* pinned */, port->id);
+            mHalEffect = sp<HwAccDeviceEffectModule>::make(mMyCallback,
+                    const_cast<effect_descriptor_t *>(&mDescriptor), mMyCallback->newEffectId(),
+                    port->id);
+            mHalEffect->configure_l();
             if (audio_is_input_device(mDevice.mType)) {
                 mHalEffect->setInputDevice(mDevice);
             } else {
                 mHalEffect->setDevices({mDevice});
             }
-            mHalEffect->configure_l();
         }
-        *handle = new EffectHandle(mHalEffect, nullptr, nullptr, 0 /*priority*/,
-                                   mNotifyFramesProcessed);
+        *handle = sp<InternalEffectHandle>::make(mHalEffect, mNotifyFramesProcessed);
         status = (*handle)->initCheck();
         if (status == OK) {
             status = mHalEffect->addHandle((*handle).get());
@@ -3553,15 +3624,16 @@
     return status;
 }
 
-void DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
-    sp<IAfEffectHandle> effect;
+sp<IAfEffectHandle> DeviceEffectProxy::onReleasePatch(audio_patch_handle_t patchHandle) {
+    sp<IAfEffectHandle> disconnectedHandle;
     {
         audio_utils::lock_guard _l(proxyMutex());
         if (mEffectHandles.find(patchHandle) != mEffectHandles.end()) {
-            effect = mEffectHandles.at(patchHandle);
+            disconnectedHandle = std::move(mEffectHandles.at(patchHandle));
             mEffectHandles.erase(patchHandle);
         }
     }
+    return disconnectedHandle;
 }
 
 
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index b516c37..9ecf89e 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -179,7 +179,7 @@
 // the attached track(s) to accumulate their auxiliary channel.
 class EffectModule : public IAfEffectModule, public EffectBase {
 public:
-    EffectModule(const sp<EffectCallbackInterface>& callabck,
+    EffectModule(const sp<EffectCallbackInterface>& callback,
                     effect_descriptor_t *desc,
                     int id,
                     audio_session_t sessionId,
@@ -217,7 +217,8 @@
     }
     status_t setDevices(const AudioDeviceTypeAddrVector& devices) final EXCLUDES_EffectBase_Mutex;
     status_t setInputDevice(const AudioDeviceTypeAddr& device) final EXCLUDES_EffectBase_Mutex;
-    status_t setVolume(uint32_t *left, uint32_t *right, bool controller, bool force) final;
+    status_t setVolume_l(uint32_t* left, uint32_t* right, bool controller, bool force) final
+            REQUIRES(audio_utils::EffectChain_Mutex);
     status_t setMode(audio_mode_t mode) final EXCLUDES_EffectBase_Mutex;
     status_t setAudioSource(audio_source_t source) final EXCLUDES_EffectBase_Mutex;
     status_t start_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
@@ -227,7 +228,7 @@
             REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
     bool isOffloaded_l() const final
             REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
-    void addEffectToHal_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    void addEffectToHal_l() override REQUIRES(audio_utils::EffectChain_Mutex);
     void release_l(const std::string& from = "") final REQUIRES(audio_utils::EffectChain_Mutex);
 
     sp<IAfEffectModule> asEffectModule() final { return this; }
@@ -249,6 +250,9 @@
 
     void dump(int fd, const Vector<String16>& args) const final;
 
+protected:
+    sp<EffectHalInterface> mEffectInterface; // Effect module HAL
+
 private:
 
     // Maximum time allocated to effect engines to complete the turn off sequence
@@ -258,18 +262,18 @@
 
     status_t start_ll() REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
     status_t stop_ll() REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
-    status_t removeEffectFromHal_l() REQUIRES(audio_utils::EffectChain_Mutex);
+    status_t removeEffectFromHal_l() override REQUIRES(audio_utils::EffectChain_Mutex);
     status_t sendSetAudioDevicesCommand(const AudioDeviceTypeAddrVector &devices, uint32_t cmdCode);
     effect_buffer_access_e requiredEffectBufferAccessMode() const {
         return mConfig.inputCfg.buffer.raw == mConfig.outputCfg.buffer.raw
                 ? EFFECT_BUFFER_ACCESS_WRITE : EFFECT_BUFFER_ACCESS_ACCUMULATE;
     }
 
-    status_t setVolumeInternal(uint32_t *left, uint32_t *right, bool controller);
-
+    status_t setVolumeInternal_ll(uint32_t* left, uint32_t* right,
+                                  bool controller /* the volume controller effect of the chain */)
+            REQUIRES(audio_utils::EffectChain_Mutex, audio_utils::EffectBase_Mutex);
 
     effect_config_t     mConfig;    // input and output audio configuration
-    sp<EffectHalInterface> mEffectInterface; // Effect module HAL
     sp<EffectBufferHalInterface> mInBuffer;  // Buffers for interacting with HAL
     sp<EffectBufferHalInterface> mOutBuffer;
     status_t            mStatus;    // initialization status
@@ -291,12 +295,12 @@
     template <typename MUTEX>
     class AutoLockReentrant {
     public:
-        AutoLockReentrant(MUTEX& mutex, pid_t allowedTid)
+        AutoLockReentrant(MUTEX& mutex, pid_t allowedTid) ACQUIRE(audio_utils::EffectBase_Mutex)
             : mMutex(gettid() == allowedTid ? nullptr : &mutex)
         {
             if (mMutex != nullptr) mMutex->lock();
         }
-        ~AutoLockReentrant() {
+        ~AutoLockReentrant() RELEASE(audio_utils::EffectBase_Mutex) {
             if (mMutex != nullptr) mMutex->unlock();
         }
     private:
@@ -313,7 +317,19 @@
     // here is used to indicate the volume to apply before this effect.
     std::optional<std::vector<uint32_t>> mReturnedVolume;
     // TODO: b/315995877, remove this debugging string after root cause
-    std::string mEffectInterfaceDebug;
+    std::string mEffectInterfaceDebug GUARDED_BY(audio_utils::EffectChain_Mutex);
+};
+
+class HwAccDeviceEffectModule : public EffectModule {
+public:
+    HwAccDeviceEffectModule(const sp<EffectCallbackInterface>& callback, effect_descriptor_t *desc,
+            int id, audio_port_handle_t deviceId) :
+        EffectModule(callback, desc, id, AUDIO_SESSION_DEVICE, /* pinned */ false, deviceId) {}
+    void addEffectToHal_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+
+private:
+    status_t removeEffectFromHal_l() final REQUIRES(audio_utils::EffectChain_Mutex);
+    bool mAddedToHal = false;
 };
 
 // The EffectHandle class implements the IEffect interface. It provides resources
@@ -328,7 +344,8 @@
     EffectHandle(const sp<IAfEffectBase>& effect,
             const sp<Client>& client,
             const sp<media::IEffectClient>& effectClient,
-            int32_t priority, bool notifyFramesProcessed);
+            int32_t priority, bool notifyFramesProcessed, bool isInternal = false,
+            audio_utils::MutexOrder mutexOrder = audio_utils::MutexOrder::kEffectHandle_Mutex);
     ~EffectHandle() override;
     status_t onTransact(
             uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) final;
@@ -348,6 +365,11 @@
                                       int32_t* _aidl_return) final;
 
     const sp<Client>& client() const final { return mClient; }
+    /**
+     * Checks whether the handle is internal, i.e. created by AudioFlinger for internal needs (e.g.
+     * device effect HAL handle or device effect thread handle).
+     */
+    virtual bool isInternal() const { return mIsInternal; }
 
     sp<android::media::IEffect> asIEffect() final {
         return sp<android::media::IEffect>::fromExisting(this);
@@ -385,15 +407,18 @@
 
     void dumpToBuffer(char* buffer, size_t size) const final;
 
+protected:
+    // protects IEffect method calls
+    mutable audio_utils::mutex mMutex;
 
 private:
     DISALLOW_COPY_AND_ASSIGN(EffectHandle);
 
-    audio_utils::mutex& mutex() const RETURN_CAPABILITY(android::audio_utils::EffectHandle_Mutex) {
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(android::audio_utils::EffectHandle_Mutex) {
         return mMutex;
     }
-    // protects IEffect method calls
-    mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kEffectHandle_Mutex};
+
     const wp<IAfEffectBase> mEffect;               // pointer to controlled EffectModule
     const sp<media::IEffectClient> mEffectClient;  // callback interface for client notifications
     /*const*/ sp<Client> mClient;            // client for shared memory allocation, see
@@ -409,6 +434,28 @@
     bool mDisconnected;                      // Set to true by disconnect()
     const bool mNotifyFramesProcessed;       // true if the client callback event
                                              // EVENT_FRAMES_PROCESSED must be generated
+    const bool mIsInternal;
+};
+
+/**
+ * There are two types of effects:
+ * -Session Effect: the handle is called directly from the client, without the AudioFlinger lock.
+ * -Device Effect: a device effect proxy aggregates a collection of internal effect handles that
+ * control the same effect added on all audio patches involving the port selected for the device
+ * effect, as requested either by a client or by AudioPolicyEffects. These internal effect handles
+ * do not have a client. The sequence flow implies a different locking order, hence the lock is
+ * specialized.
+ */
+class InternalEffectHandle : public EffectHandle {
+public:
+    InternalEffectHandle(const sp<IAfEffectBase>& effect, bool notifyFramesProcessed) :
+            EffectHandle(effect, /* client= */ nullptr, /* effectClient= */ nullptr,
+                         /* priority= */ 0, notifyFramesProcessed, /* isInternal */ true,
+                         audio_utils::MutexOrder::kDeviceEffectHandle_Mutex) {}
+
+    virtual audio_utils::mutex& mutex() const
+            RETURN_CAPABILITY(android::audio_utils::DeviceEffectHandle_Mutex) {
+        return mMutex;
+    }
 };
 
 // the EffectChain class represents a group of effects associated to one audio session.
@@ -712,7 +759,7 @@
              uint32_t mRightVolume;      // previous volume on right channel
              uint32_t mNewLeftVolume;       // new volume on left channel
              uint32_t mNewRightVolume;      // new volume on right channel
-             product_strategy_t mStrategy; // strategy for this effect chain
+             product_strategy_t mStrategy = PRODUCT_STRATEGY_NONE; // strategy for this effect chain
              // mSuspendedEffects lists all effects currently suspended in the chain.
              // Use effect type UUID timelow field as key. There is no real risk of identical
              // timeLow fields among effect type UUIDs.
@@ -746,7 +793,7 @@
     status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle, audio_patch_handle_t newPatchHandle,
            const IAfPatchPanel::Patch& patch) final;
 
-    void onReleasePatch(audio_patch_handle_t patchHandle) final;
+    sp<IAfEffectHandle> onReleasePatch(audio_patch_handle_t patchHandle) final;
 
     size_t removeEffect(const sp<IAfEffectModule>& effect) final;
 
@@ -795,7 +842,10 @@
         audio_channel_mask_t outChannelMask() const override;
         uint32_t outChannelCount() const override;
         audio_channel_mask_t hapticChannelMask() const override { return AUDIO_CHANNEL_NONE; }
-        size_t frameCount() const override  { return 0; }
+        /**
+         * frameCount cannot be zero.
+         */
+        size_t frameCount() const override  { return 1; }
         uint32_t latency() const override  { return 0; }
 
         status_t addEffectToHal(const sp<EffectHalInterface>& effect) override;
@@ -807,7 +857,7 @@
         void checkSuspendOnEffectEnabled(const sp<IAfEffectBase>& effect __unused,
                               bool enabled __unused, bool threadLocked __unused) override {}
         void resetVolume_l() override REQUIRES(audio_utils::EffectChain_Mutex) {}
-        product_strategy_t strategy() const override  { return static_cast<product_strategy_t>(0); }
+        product_strategy_t strategy() const override  { return PRODUCT_STRATEGY_NONE; }
         int32_t activeTrackCnt() const override { return 0; }
         void onEffectEnable(const sp<IAfEffectBase>& effect __unused) override;
         void onEffectDisable(const sp<IAfEffectBase>& effect __unused) override;
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index b9bb18c..3a059b6 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -153,7 +153,7 @@
 
 public:
     static sp<IAfEffectModule> create(
-            const sp<EffectCallbackInterface>& callabck,
+            const sp<EffectCallbackInterface>& callback,
             effect_descriptor_t *desc,
             int id,
             audio_session_t sessionId,
@@ -163,8 +163,9 @@
     virtual int16_t *inBuffer() const = 0;
     virtual status_t setDevices(const AudioDeviceTypeAddrVector &devices) = 0;
     virtual status_t setInputDevice(const AudioDeviceTypeAddr &device) = 0;
-    virtual status_t setVolume(uint32_t *left, uint32_t *right, bool controller,
-                               bool force = false) = 0;
+    virtual status_t setVolume_l(uint32_t* left, uint32_t* right,
+                                 bool controller /* effect controlling chain volume */,
+                                 bool force = false) REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual status_t setOffloaded_l(bool offloaded, audio_io_handle_t io) = 0;
     virtual bool isOffloaded_l() const = 0;
 
@@ -188,11 +189,13 @@
     virtual status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata)
             REQUIRES(audio_utils::ThreadBase_Mutex,
                      audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    // Returns true if there was a state change from STARTING to ACTIVE, or from STOPPED to IDLE;
+    // the effect chain will do a volume reset in these two cases.
+    virtual bool updateState_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
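For context, a minimal sketch of how a chain-side caller could act on the updateState_l() return value; the surrounding names (mEffects, resetVolume_l) are assumptions, not the actual EffectChain implementation:

    // Hypothetical caller, assumed to run with EffectChain_Mutex held.
    bool needsVolumeReset = false;
    for (const auto& effect : mEffects) {
        // true on STARTING -> ACTIVE or STOPPED -> IDLE transitions
        needsVolumeReset |= effect->updateState_l();
    }
    if (needsVolumeReset) {
        resetVolume_l();  // re-apply the chain volume after the state change
    }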
 
 private:
     virtual void process() = 0;
-    virtual bool updateState_l()
-            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
     virtual void reset_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual status_t configure_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual status_t init_l()
@@ -211,6 +214,7 @@
 
     virtual status_t stop_l() = 0;
     virtual void addEffectToHal_l() = 0;
+    virtual status_t removeEffectFromHal_l() = 0;
     virtual void release_l(const std::string& from) = 0;
 };
 
@@ -341,7 +345,8 @@
 
     // sendMetadata_l() must be called with thread->mLock held
     virtual void sendMetadata_l(const std::vector<playback_track_metadata_v7_t>& allMetadata,
-        const std::optional<const std::vector<playback_track_metadata_v7_t>> spatializedMetadata);
+                                const std::optional<const std::vector<playback_track_metadata_v7_t>>
+                                        spatializedMetadata) = 0;
 
     virtual void dump(int fd, const Vector<String16>& args) const = 0;
 };
@@ -396,7 +401,14 @@
     virtual status_t onUpdatePatch(audio_patch_handle_t oldPatchHandle,
             audio_patch_handle_t newPatchHandle,
             const IAfPatchPanel::Patch& patch) = 0;
-    virtual void onReleasePatch(audio_patch_handle_t patchHandle) = 0;
+    /**
+     * Checks whether the effect handle is linked with the given released patch handle and,
+     * if so, releases it.
+     *
+     * @param patchHandle handle of the released patch
+     * @return a reference to the released effect handle if any, nullptr otherwise.
+     * This allows the caller to delay the destruction of the handle.
+     */
+    virtual sp<IAfEffectHandle> onReleasePatch(audio_patch_handle_t patchHandle) = 0;
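A minimal caller-side sketch of the delayed-destruction pattern the new return value enables; the surrounding function and lock are illustrative assumptions:

    sp<IAfEffectHandle> releasedHandle;
    {
        audio_utils::lock_guard _l(mutex());  // hypothetical caller lock
        releasedHandle = deviceEffect->onReleasePatch(patchHandle);
        // ... other work under the lock ...
    }
    // releasedHandle goes out of scope here, so the EffectHandle is destroyed
    // after the lock is dropped rather than while it is held.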
 
     virtual void dump2(int fd, int spaces) const = 0; // TODO(b/291319101) naming?
 
diff --git a/services/audioflinger/IAfPatchPanel.h b/services/audioflinger/IAfPatchPanel.h
index 6110e4c..15b6ddf 100644
--- a/services/audioflinger/IAfPatchPanel.h
+++ b/services/audioflinger/IAfPatchPanel.h
@@ -82,7 +82,8 @@
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) REQUIRES(mutex()) = 0;
+            audio_output_flags_t* flags,
+            audio_attributes_t attributes) REQUIRES(mutex()) = 0;
     virtual audio_utils::mutex& mutex() const
             RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex) = 0;
     virtual const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 4b6ab89..abb8f2f 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -19,12 +19,14 @@
 #include <android/media/IAudioTrackCallback.h>
 #include <android/media/IEffectClient.h>
 #include <audiomanager/IAudioManager.h>
-#include <audio_utils/mutex.h>
+#include <audio_utils/DeferredExecutor.h>
 #include <audio_utils/MelProcessor.h>
+#include <audio_utils/mutex.h>
 #include <binder/MemoryDealer.h>
 #include <datapath/AudioStreamIn.h>
 #include <datapath/AudioStreamOut.h>
 #include <datapath/VolumeInterface.h>
+#include <datapath/VolumePortInterface.h>
 #include <fastpath/FastMixerDumpState.h>
 #include <media/DeviceDescriptorBase.h>
 #include <media/MmapStreamInterface.h>
@@ -36,6 +38,10 @@
 
 #include <optional>
 
+namespace com::android::media::permission {
+    class IPermissionProvider;
+}
+
 namespace android {
 
 class IAfDirectOutputThread;
@@ -116,9 +122,14 @@
             const sp<AudioIoDescriptor>& ioDesc,
             pid_t pid = 0) EXCLUDES_AudioFlinger_ClientMutex = 0;
     virtual void onNonOffloadableGlobalEffectEnable() EXCLUDES_AudioFlinger_Mutex = 0;
-    virtual void onSupportedLatencyModesChanged(
-            audio_io_handle_t output, const std::vector<audio_latency_mode_t>& modes)
+    virtual void onSupportedLatencyModesChanged(audio_io_handle_t output,
+                                                const std::vector<audio_latency_mode_t>& modes)
             EXCLUDES_AudioFlinger_ClientMutex = 0;
+
+    virtual void onHardError(std::set<audio_port_handle_t>& trackPortIds) = 0;
+
+    virtual const ::com::android::media::permission::IPermissionProvider&
+            getPermissionProvider() = 0;
 };
 
 class IAfThreadBase : public virtual RefBase {
@@ -391,8 +402,14 @@
     // the Thread is not busy releasing the Tracks, during which the Thread mutex
     // may be temporarily unlocked.  Some Track methods will use this method to
     // avoid races.
-    virtual void waitWhileThreadBusy_l(audio_utils::unique_lock& ul)
+    virtual void waitWhileThreadBusy_l(audio_utils::unique_lock<audio_utils::mutex>& ul)
             REQUIRES(mutex()) = 0;
+
+    // The ThreadloopExecutor is used to defer functors or destructors until
+    // the threadLoop no longer holds any mutexes (at the end of the
+    // processing period cycle).
+    virtual audio_utils::DeferredExecutor& getThreadloopExecutor() = 0;
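A sketch of the intended usage; the enqueue method name ("defer") is an assumption since only process() is visible in this diff, and the captured object is illustrative:

    // Illustrative only: "defer" is an assumed enqueue method; the real
    // audio_utils::DeferredExecutor API may differ.
    // retiredTrack: an sp<IAfTrack> the caller wants released later.
    thread->getThreadloopExecutor().defer(
            [t = std::move(retiredTrack)]() mutable { t.clear(); });
    // At the end of the processing period cycle, with no mutexes held, the
    // thread runs the deferred work (see mThreadloopExecutor.process() below),
    // so the captured sp<> is released outside of any lock.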
+
     // Dynamic cast to derived interface
     virtual sp<IAfDirectOutputThread> asIAfDirectOutputThread() { return nullptr; }
     virtual sp<IAfDuplicatingThread> asIAfDuplicatingThread() { return nullptr; }
@@ -463,7 +480,8 @@
             const sp<media::IAudioTrackCallback>& callback,
             bool isSpatialized,
             bool isBitPerfect,
-            audio_output_flags_t* afTrackFlags)
+            audio_output_flags_t* afTrackFlags,
+            float volume)
             REQUIRES(audio_utils::AudioFlinger_Mutex) = 0;
 
     virtual status_t addTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex()) = 0;
@@ -536,6 +554,12 @@
     virtual const std::atomic<int64_t>& framesWritten() const = 0;
 
     virtual bool usesHwAvSync() const = 0;
+
+    virtual void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMute)
+            EXCLUDES_ThreadBase_Mutex = 0;
+
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
+            EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfDirectOutputThread : public virtual IAfPlaybackThread {
@@ -675,6 +699,9 @@
             AudioHwDevice* hwDev, AudioStreamOut* output, bool systemReady);
 
     virtual AudioStreamOut* clearOutput() EXCLUDES_ThreadBase_Mutex = 0;
+
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
+            EXCLUDES_ThreadBase_Mutex = 0;
 };
 
 class IAfMmapCaptureThread : public virtual IAfMmapThread {
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index 8ed44c6..ee834d6 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -21,6 +21,7 @@
 #include <audio_utils/mutex.h>
 #include <audiomanager/IAudioManager.h>
 #include <binder/IMemory.h>
+#include <datapath/VolumePortInterface.h>
 #include <fastpath/FastMixerDumpState.h>
 #include <media/AudioSystem.h>
 #include <media/VolumeShaper.h>
@@ -254,7 +255,7 @@
 };
 
 // Common interface for Playback tracks.
-class IAfTrack : public virtual IAfTrackBase {
+class IAfTrack : public virtual IAfTrackBase, public virtual VolumePortInterface {
 public:
     // FillingStatus is used for suppressing volume ramp at begin of playing
     enum FillingStatus { FS_INVALID, FS_FILLING, FS_FILLED, FS_ACTIVE };
@@ -289,7 +290,8 @@
             size_t frameCountToBeReady = SIZE_MAX,
             float speed = 1.0f,
             bool isSpatialized = false,
-            bool isBitPerfect = false);
+            bool isBitPerfect = false,
+            float volume = 0.0f);
 
     virtual void pause() = 0;
     virtual void flush() = 0;
@@ -375,6 +377,8 @@
     virtual void triggerEvents(AudioSystem::sync_event_t type) = 0;
 
     virtual void disable() = 0;
+    virtual bool isDisabled() const = 0;
+
     virtual int& fastIndex() = 0;
     virtual bool isPlaybackRestricted() const = 0;
 
@@ -425,6 +429,10 @@
     virtual FillingStatus& fillingStatus() = 0;
     virtual int8_t& retryCount() = 0;
     virtual FastTrackUnderruns& fastTrackUnderruns() = 0;
+
+    // Internal mute; currently only used for bit-perfect playback
+    virtual bool getInternalMute() const = 0;
+    virtual void setInternalMute(bool muted) = 0;
 };
 
 // playback track, used by DuplicatingThread
@@ -446,7 +454,7 @@
     virtual ExtendedTimestamp getClientProxyTimestamp() const = 0;
 };
 
-class IAfMmapTrack : public virtual IAfTrackBase {
+class IAfMmapTrack : public virtual IAfTrackBase, public virtual VolumePortInterface {
 public:
     static sp<IAfMmapTrack> create(IAfThreadBase* thread,
             const audio_attributes_t& attr,
@@ -457,7 +465,8 @@
             bool isOut,
             const android::content::AttributionSourceState& attributionSource,
             pid_t creatorPid,
-            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
+            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
+            float volume = 0.0f);
 
     // protected by MMapThread::mLock
     virtual void setSilenced_l(bool silenced) = 0;
@@ -573,10 +582,12 @@
             size_t bufferSize,
             audio_output_flags_t flags,
             const Timeout& timeout = {},
-            size_t frameCountToBeReady = 1 /** Default behaviour is to start
+            size_t frameCountToBeReady = 1, /** Default behaviour is to start
                                              *  as soon as possible to have
                                              *  the lowest possible latency
-                                             *  even if it might glitch. */);
+                                             *  even if it might glitch. */
+            float speed = 1.0f,
+            float volume = 1.0f);
 };
 
 class IAfPatchRecord : public virtual IAfRecordTrack, public virtual IAfPatchTrackBase {
diff --git a/services/audioflinger/MelReporter.cpp b/services/audioflinger/MelReporter.cpp
index 1d38306..57f4ff6 100644
--- a/services/audioflinger/MelReporter.cpp
+++ b/services/audioflinger/MelReporter.cpp
@@ -117,6 +117,11 @@
     }
 }
 
+void MelReporter::resetReferencesForTest() {
+    mAfMelReporterCallback.clear();
+    mSoundDoseManager->resetReferencesForTest();
+}
+
 void MelReporter::onCreateAudioPatch(audio_patch_handle_t handle,
         const IAfPatchPanel::Patch& patch) {
     if (!mSoundDoseManager->isCsdEnabled()) {
diff --git a/services/audioflinger/MelReporter.h b/services/audioflinger/MelReporter.h
index 0aeb225..8b062f3 100644
--- a/services/audioflinger/MelReporter.h
+++ b/services/audioflinger/MelReporter.h
@@ -103,6 +103,8 @@
             const std::vector<playback_track_metadata_v7_t>& metadataVec)
             EXCLUDES_AudioFlinger_Mutex;
 
+    void resetReferencesForTest();
+
 private:
     struct ActiveMelPatch {
         audio_io_handle_t streamHandle{AUDIO_IO_HANDLE_NONE};
@@ -131,7 +133,7 @@
 
     bool useHalSoundDoseInterface_l() REQUIRES(mutex());
 
-    const sp<IAfMelReporterCallback> mAfMelReporterCallback;
+    sp<IAfMelReporterCallback> mAfMelReporterCallback;
     const sp<IAfPatchPanel> mAfPatchPanel;
 
     /* const */ sp<SoundDoseManager> mSoundDoseManager;  // set onFirstRef
diff --git a/services/audioflinger/MmapTracks.h b/services/audioflinger/MmapTracks.h
index 85ce142..8758bd0 100644
--- a/services/audioflinger/MmapTracks.h
+++ b/services/audioflinger/MmapTracks.h
@@ -35,7 +35,8 @@
                             bool isOut,
                             const android::content::AttributionSourceState& attributionSource,
                             pid_t creatorPid,
-                            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE);
+                            audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE,
+                            float volume = 0.0f);
     ~MmapTrack() override;
 
     status_t initCheck() const final;
@@ -65,6 +66,13 @@
     void processMuteEvent_l(const sp<IAudioManager>& audioManager,
                             mute_state_t muteState)
                             /* REQUIRES(MmapPlaybackThread::mLock) */ final;
+
+    // VolumePortInterface implementation
+    void setPortVolume(float volume) override {
+        mVolume = volume;
+    }
+    float getPortVolume() const override { return mVolume; }
+
 private:
     DISALLOW_COPY_AND_ASSIGN(MmapTrack);
 
@@ -87,6 +95,8 @@
             /* GUARDED_BY(MmapPlaybackThread::mLock) */;
     mute_state_t mMuteState
             /* GUARDED_BY(MmapPlaybackThread::mLock) */;
+
+    float mVolume = 0.0f;
 };  // end of Track
 
 } // namespace android
\ No newline at end of file
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 4333cc8..d0b96de 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -260,6 +260,7 @@
                     if (patch->sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS) {
                         flags = patch->sinks[0].flags.output;
                     }
+                    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
                     const sp<IAfThreadBase> thread = mAfPatchPanelCallback->openOutput_l(
                                                             patch->sinks[0].ext.device.hw_module,
                                                             &output,
@@ -267,7 +268,8 @@
                                                             &mixerConfig,
                                                             outputDevice,
                                                             outputDeviceAddress,
-                                                            flags);
+                                                            &flags,
+                                                            attributes);
                     ALOGV("mAfPatchPanelCallback->openOutput_l() returned %p", thread.get());
                     if (thread == 0) {
                         status = NO_MEMORY;
@@ -646,7 +648,9 @@
                                            tempRecordTrack->bufferSize(),
                                            outputFlags,
                                            {} /*timeout*/,
-                                           frameCountToBeReady);
+                                           frameCountToBeReady,
+                                           1.0f /*speed*/,
+                                           1.0f /*volume*/);
     status = mPlayback.checkTrack(tempPatchTrack.get());
     if (status != NO_ERROR) {
         return status;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 6c22e21..84758a4 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -96,7 +96,8 @@
                                 size_t frameCountToBeReady = SIZE_MAX,
                                 float speed = 1.0f,
                                 bool isSpatialized = false,
-                                bool isBitPerfect = false);
+                                bool isBitPerfect = false,
+                                float volume = 0.0f);
     ~Track() override;
     status_t initCheck() const final;
     void appendDumpHeader(String8& result) const final;
@@ -220,6 +221,13 @@
      */
     void processMuteEvent_l(const sp<IAudioManager>& audioManager, mute_state_t muteState) final;
 
+    bool getInternalMute() const final { return mInternalMute; }
+    void setInternalMute(bool muted) final { mInternalMute = muted; }
+
+    // VolumePortInterface implementation
+    void setPortVolume(float volume) override;
+    float getPortVolume() const override { return mVolume; }
+
 protected:
 
     DISALLOW_COPY_AND_ASSIGN(Track);
@@ -275,6 +283,8 @@
     void triggerEvents(AudioSystem::sync_event_t type) final;
     void invalidate() final;
     void disable() final;
+    bool isDisabled() const final;
+
     int& fastIndex() final { return mFastIndex; }
     bool isPlaybackRestricted() const final {
         // The monitor is only created for tracks that can be silenced.
@@ -358,6 +368,8 @@
         for (auto& tp : mTeePatches) { f(tp.patchTrack); }
     };
 
+    void                populateUsageAndContentTypeFromStreamType();
+
     size_t              mPresentationCompleteFrames = 0; // (Used for Mixed tracks)
                                     // The number of frames written to the
                                     // audio HAL when this track is considered fully rendered.
@@ -399,6 +411,8 @@
     // access these two variables only when holding player thread lock.
     std::unique_ptr<os::PersistableBundle> mMuteEventExtras;
     mute_state_t        mMuteState;
+    bool                mInternalMute = false;
+    std::atomic<float> mVolume = 0.0f;
 };  // end of Track
 
 
@@ -449,7 +463,7 @@
     void                queueBuffer(Buffer& inBuffer);
     void                clearBufferQueue();
 
-    void                restartIfDisabled();
+    void restartIfDisabled() override;
 
     // Maximum number of pending buffers allocated by OutputTrack::write()
     static const uint8_t kMaxOverFlowBuffers = 10;
@@ -491,10 +505,12 @@
                                    size_t bufferSize,
                                    audio_output_flags_t flags,
                                    const Timeout& timeout = {},
-                                   size_t frameCountToBeReady = 1 /** Default behaviour is to start
+                                   size_t frameCountToBeReady = 1, /** Default behaviour is to start
                                                                     *  as soon as possible to have
                                                                     *  the lowest possible latency
-                                                                    *  even if it might glitch. */);
+                                                                    *  even if it might glitch. */
+                                   float speed = 1.0f,
+                                   float volume = 1.0f);
     ~PatchTrack() override;
 
     size_t framesReady() const final;
@@ -512,7 +528,7 @@
     void releaseBuffer(Proxy::Buffer* buffer) final;
 
 private:
-            void restartIfDisabled();
+    void restartIfDisabled() override;
 };  // end of PatchTrack
 
 } // namespace android
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index fd5d5fc..9c8a51c 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -33,6 +33,7 @@
 #include <afutils/Vibrator.h>
 #include <audio_utils/MelProcessor.h>
 #include <audio_utils/Metadata.h>
+#include <com_android_media_audioserver.h>
 #ifdef DEBUG_CPU_USAGE
 #include <audio_utils/Statistics.h>
 #include <cpustats/ThreadCpuUsage.h>
@@ -48,6 +49,7 @@
 #include <binder/IServiceManager.h>
 #include <binder/PersistableBundle.h>
 #include <com_android_media_audio.h>
+#include <com_android_media_audioserver.h>
 #include <cutils/bitops.h>
 #include <cutils/properties.h>
 #include <fastpath/AutoPark.h>
@@ -71,6 +73,7 @@
 #include <media/nbaio/Pipe.h>
 #include <media/nbaio/PipeReader.h>
 #include <media/nbaio/SourceAudioBufferProvider.h>
+#include <media/ValidatedAttributionSourceState.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/Process.h>
 #include <mediautils/SchedulingPolicyService.h>
@@ -78,6 +81,7 @@
 #include <powermanager/PowerManager.h>
 #include <private/android_filesystem_config.h>
 #include <private/media/AudioTrackShared.h>
+#include <psh_utils/AudioPowerManager.h>
 #include <system/audio_effects/effect_aec.h>
 #include <system/audio_effects/effect_downmix.h>
 #include <system/audio_effects/effect_ns.h>
@@ -119,6 +123,9 @@
     return a < b ? a : b;
 }
 
+using com::android::media::permission::ValidatedAttributionSourceState;
+namespace audioserver_flags = com::android::media::audioserver;
+
 namespace android {
 
 using audioflinger::SyncEvent;
@@ -161,6 +168,9 @@
 
 // maximum time to wait in sendConfigEvent_l() for a status to be received
 static const nsecs_t kConfigEventTimeoutNs = seconds(2);
+// longer timeout for create audio patch to account for specific scenarios
+// with Bluetooth devices
+static const nsecs_t kCreatePatchEventTimeoutNs = seconds(4);
 
 // minimum sleep time for the mixer thread loop when tracks are active but in underrun
 static const uint32_t kMinThreadSleepTimeUs = 5000;
@@ -727,9 +737,11 @@
     mutex().unlock();
     {
         audio_utils::unique_lock _l(event->mutex());
+        nsecs_t timeoutNs = event->mType == CFG_EVENT_CREATE_AUDIO_PATCH ?
+              kCreatePatchEventTimeoutNs : kConfigEventTimeoutNs;
         while (event->mWaitStatus) {
             if (event->mCondition.wait_for(
-                    _l, std::chrono::nanoseconds(kConfigEventTimeoutNs), getTid())
+                    _l, std::chrono::nanoseconds(timeoutNs), getTid())
                             == std::cv_status::timeout) {
                 event->mStatus = TIMED_OUT;
                 event->mWaitStatus = false;
@@ -1185,6 +1197,8 @@
         return String16("MmapCapture");
     case SPATIALIZER:
         return String16("AudioSpatial");
+    case BIT_PERFECT:
+        return String16("AudioBitPerfect");
     default:
         ALOG_ASSERT(false);
         return String16("AudioUnknown");
@@ -1205,6 +1219,10 @@
                     {} /* historyTag */);
         if (status.isOk()) {
             mWakeLockToken = binder;
+            if (media::psh_utils::AudioPowerManager::enabled()) {
+                mThreadToken = media::psh_utils::createAudioThreadToken(
+                        getTid(), String8(getWakeLockTag()).c_str());
+            }
         }
         ALOGV("acquireWakeLock_l() %s status %d", mThreadName, status.exceptionCode());
     }
@@ -1230,6 +1248,7 @@
         }
         mWakeLockToken.clear();
     }
+    mThreadToken.reset();
 }
 
 void ThreadBase::getPowerManager_l() {
@@ -1486,8 +1505,8 @@
     }
 
     if (IAfEffectModule::isHapticGenerator(&desc->type) && mHapticChannelCount == 0) {
-        ALOGW("%s: thread doesn't support haptic playback while the effect is HapticGenerator",
-                __func__);
+        ALOGW("%s: thread (%s) doesn't support haptic playback while the effect is HapticGenerator",
+              __func__, threadTypeToString(mType));
         return BAD_VALUE;
     }
 
@@ -1569,14 +1588,13 @@
         }
         break;
     case SPATIALIZER:
-        // Global effects (AUDIO_SESSION_OUTPUT_MIX) are not supported on spatializer mixer
-        // as there is no common accumulation buffer for sptialized and non sptialized tracks.
+        // Global effects (AUDIO_SESSION_OUTPUT_MIX) are supported on the spatializer mixer, but
+        // only the spatialized tracks have global effects applied for now.
         // Post processing effects (AUDIO_SESSION_OUTPUT_STAGE or AUDIO_SESSION_DEVICE)
         // are supported and added after the spatializer.
         if (sessionId == AUDIO_SESSION_OUTPUT_MIX) {
-            ALOGW("%s: global effect %s not supported on spatializer thread %s",
-                    __func__, desc->name, mThreadName);
-            return BAD_VALUE;
+            ALOGD("%s: global effect %s on spatializer thread %s", __func__, desc->name,
+                  mThreadName);
         } else if (sessionId == AUDIO_SESSION_OUTPUT_STAGE) {
             // only post processing , downmixer or spatializer effects on output stage session
             if (IAfEffectModule::isSpatializer(&desc->type)
@@ -1695,7 +1713,7 @@
             // TODO(b/184194057): Use the vibrator information from the vibrator that will be used
             // for the HapticGenerator.
             const std::optional<media::AudioVibratorInfo> defaultVibratorInfo =
-                    std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
+                    mAfThreadCallback->getDefaultVibratorInfo_l();
             if (defaultVibratorInfo) {
                 audio_utils::lock_guard _cl(chain->mutex());
                 // Only set the vibrator info when it is a valid one.
@@ -2208,17 +2226,18 @@
                 (int64_t)(mIsMsdDevice ? AUDIO_DEVICE_OUT_BUS // turn on by default for MSD
                                        : AUDIO_DEVICE_NONE));
     }
-
-    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
-        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
-        mStreamTypes[stream].volume = 0.0f;
-        mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+    if (!audioserver_flags::portid_volume_management()) {
+        for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+            const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
+            mStreamTypes[stream].volume = 0.0f;
+            mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+        }
+        // Audio patch and call assistant volume are always max
+        mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
+        mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
+        mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
+        mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
     }
-    // Audio patch and call assistant volume are always max
-    mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
-    mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
-    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
-    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
 }
 
 PlaybackThread::~PlaybackThread()
@@ -2269,16 +2288,17 @@
 void PlaybackThread::dumpTracks_l(int fd, const Vector<String16>& /* args */)
 {
     String8 result;
-
-    result.appendFormat("  Stream volumes in dB: ");
-    for (int i = 0; i < AUDIO_STREAM_CNT; ++i) {
-        const stream_type_t *st = &mStreamTypes[i];
-        if (i > 0) {
-            result.appendFormat(", ");
-        }
-        result.appendFormat("%d:%.2g", i, 20.0 * log10(st->volume));
-        if (st->mute) {
-            result.append("M");
+    if (!audioserver_flags::portid_volume_management()) {
+        result.appendFormat("  Stream volumes in dB: ");
+        for (int i = 0; i < AUDIO_STREAM_CNT; ++i) {
+            const stream_type_t *st = &mStreamTypes[i];
+            if (i > 0) {
+                result.appendFormat(", ");
+            }
+            result.appendFormat("%d:%.2g", i, 20.0 * log10(st->volume));
+            if (st->mute) {
+                result.append("M");
+            }
         }
     }
     result.append("\n");
@@ -2386,7 +2406,8 @@
         const sp<media::IAudioTrackCallback>& callback,
         bool isSpatialized,
         bool isBitPerfect,
-        audio_output_flags_t *afTrackFlags)
+        audio_output_flags_t *afTrackFlags,
+        float volume)
 {
     size_t frameCount = *pFrameCount;
     size_t notificationFrameCount = *pNotificationFrameCount;
@@ -2696,14 +2717,17 @@
             }
         }
 
-        // Set DIRECT flag if current thread is DirectOutputThread. This can
-        // happen when the playback is rerouted to direct output thread by
+        // Set DIRECT/OFFLOAD flag if current thread is DirectOutputThread/OffloadThread.
+        // This can happen when the playback is rerouted to direct output/offload thread by
         // dynamic audio policy.
         // Do NOT report the flag changes back to client, since the client
-        // doesn't explicitly request a direct flag.
+        // doesn't explicitly request a direct/offload flag.
         audio_output_flags_t trackFlags = *flags;
         if (mType == DIRECT) {
             trackFlags = static_cast<audio_output_flags_t>(trackFlags | AUDIO_OUTPUT_FLAG_DIRECT);
+        } else if (mType == OFFLOAD) {
+            trackFlags = static_cast<audio_output_flags_t>(trackFlags |
+                                   AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT);
         }
         *afTrackFlags = trackFlags;
 
@@ -2712,7 +2736,7 @@
                           nullptr /* buffer */, (size_t)0 /* bufferSize */, sharedBuffer,
                           sessionId, creatorPid, attributionSource, trackFlags,
                           IAfTrackBase::TYPE_DEFAULT, portId, SIZE_MAX /*frameCountToBeReady*/,
-                          speed, isSpatialized, isBitPerfect);
+                          speed, isSpatialized, isBitPerfect, volume);
 
         lStatus = track != 0 ? track->initCheck() : (status_t) NO_MEMORY;
         if (lStatus != NO_ERROR) {
@@ -2840,6 +2864,22 @@
     return mStreamTypes[stream].volume;
 }
 
+status_t PlaybackThread::setPortsVolume(
+        const std::vector<audio_port_handle_t>& portIds, float volume) {
+    audio_utils::lock_guard _l(mutex());
+    for (const auto& portId : portIds) {
+        for (size_t i = 0; i < mTracks.size(); i++) {
+            sp<IAfTrack> track = mTracks[i].get();
+            if (portId == track->portId()) {
+                track->setPortVolume(volume);
+                break;
+            }
+        }
+    }
+    broadcast_l();
+    return NO_ERROR;
+}
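A hedged caller-side sketch of the new per-port volume path; the dispatch helper below is illustrative (the actual AudioFlinger entry point is not part of this hunk):

    // Hypothetical dispatch helper; names are illustrative.
    status_t applyPortsVolume(const sp<IAfPlaybackThread>& thread,
                              const std::vector<audio_port_handle_t>& portIds,
                              float volume) {
        if (thread == nullptr || volume < 0.f || volume > 1.f) {
            return BAD_VALUE;
        }
        // setPortsVolume() locks the thread mutex, matches each portId against
        // the thread's tracks, updates the matching track's port volume and
        // broadcasts so the mixer picks up the new value.
        return thread->setPortsVolume(portIds, volume);
    }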
+
 void PlaybackThread::setVolumeForOutput_l(float left, float right) const
 {
     mOutput->stream->setVolume(left, right);
@@ -2913,7 +2953,7 @@
                 // TODO(b/184194780): Use the vibrator information from the vibrator that will be
                 // used to play this track.
                  audio_utils::lock_guard _l(mAfThreadCallback->mutex());
-                vibratorInfo = std::move(mAfThreadCallback->getDefaultVibratorInfo_l());
+                vibratorInfo = mAfThreadCallback->getDefaultVibratorInfo_l();
             }
             mutex().lock();
             track->setHapticScale(hapticScale);
@@ -3003,6 +3043,23 @@
     }
 }
 
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds_l()
+{
+    std::set<int32_t> result;
+    for (const auto& t : mTracks) {
+        if (t->isExternalTrack()) {
+            result.insert(t->portId());
+        }
+    }
+    return result;
+}
+
+std::set<audio_port_handle_t> PlaybackThread::getTrackPortIds()
+{
+    audio_utils::lock_guard _l(mutex());
+    return getTrackPortIds_l();
+}
+
 String8 PlaybackThread::getParameters(const String8& keys)
 {
     audio_utils::lock_guard _l(mutex());
@@ -3056,9 +3113,9 @@
     mCallbackThread->resetDraining();
 }
 
-void PlaybackThread::onError()
+void PlaybackThread::onError(bool isHardError)
 {
-    mCallbackThread->setAsyncError();
+    mCallbackThread->setAsyncError(isHardError);
 }
 
 void PlaybackThread::onCodecFormatChanged(
@@ -3401,9 +3458,9 @@
         return NO_ERROR;
     } else {
         status_t status;
-        uint32_t frames;
+        uint64_t frames = 0;
         status = mOutput->getRenderPosition(&frames);
-        *dspFrames = (size_t)frames;
+        *dspFrames = (uint32_t)frames;
         return status;
     }
 }
@@ -3774,19 +3831,31 @@
             ALOGV("addEffectChain_l() creating new input buffer %p session %d",
                     buffer, session);
         } else {
-            // A global session on a SPATIALIZER thread is either OUTPUT_STAGE or DEVICE
-            // - OUTPUT_STAGE session uses the mEffectBuffer as input buffer and
-            // mPostSpatializerBuffer as output buffer
-            // - DEVICE session uses the mPostSpatializerBuffer as input and output buffer.
-            status_t result = mAfThreadCallback->getEffectsFactoryHal()->mirrorBuffer(
-                    mEffectBuffer, mEffectBufferSize, &halInBuffer);
-            if (result != OK) return result;
-            result = mAfThreadCallback->getEffectsFactoryHal()->mirrorBuffer(
-                    mPostSpatializerBuffer, mPostSpatializerBufferSize, &halOutBuffer);
-            if (result != OK) return result;
+            status_t result = INVALID_OPERATION;
+            // Buffer configuration for global sessions on a SPATIALIZER thread:
+            // - AUDIO_SESSION_OUTPUT_MIX session uses the mEffectBuffer as input and output buffer
+            // - AUDIO_SESSION_OUTPUT_STAGE session uses the mEffectBuffer as input buffer and
+            //   mPostSpatializerBuffer as output buffer
+            // - AUDIO_SESSION_DEVICE session uses the mPostSpatializerBuffer as input and output
+            //   buffer
+            if (session == AUDIO_SESSION_OUTPUT_MIX || session == AUDIO_SESSION_OUTPUT_STAGE) {
+                result = mAfThreadCallback->getEffectsFactoryHal()->mirrorBuffer(
+                        mEffectBuffer, mEffectBufferSize, &halInBuffer);
+                if (result != OK) return result;
 
-            if (session == AUDIO_SESSION_DEVICE) {
-                halInBuffer = halOutBuffer;
+                if (session == AUDIO_SESSION_OUTPUT_MIX) {
+                    halOutBuffer = halInBuffer;
+                }
+            }
+
+            if (session == AUDIO_SESSION_OUTPUT_STAGE || session == AUDIO_SESSION_DEVICE) {
+                result = mAfThreadCallback->getEffectsFactoryHal()->mirrorBuffer(
+                        mPostSpatializerBuffer, mPostSpatializerBufferSize, &halOutBuffer);
+                if (result != OK) return result;
+
+                if (session == AUDIO_SESSION_DEVICE) {
+                    halInBuffer = halOutBuffer;
+                }
             }
         }
     } else {
@@ -3996,7 +4065,13 @@
     // FIXME could this be made local to while loop?
     writeFrames = 0;
 
-    cacheParameters_l();
+    {
+        audio_utils::lock_guard l(mutex());
+
+        cacheParameters_l();
+        checkSilentMode_l();
+    }
+
     mSleepTimeUs = mIdleSleepTimeUs;
 
     if (mType == MIXER || mType == SPATIALIZER) {
@@ -4021,8 +4096,6 @@
     // suspended mode (for now) to help schedule the wait time until next iteration.
     nsecs_t timeLoopNextNs = 0;
 
-    checkSilentMode_l();
-
     audio_patch_handle_t lastDownstreamPatchHandle = AUDIO_PATCH_HANDLE_NONE;
 
     sendCheckOutputStageEffectsEvent();
@@ -4625,7 +4698,11 @@
 
         // FIXME Note that the above .clear() is no longer necessary since effectChains
         // is now local to this block, but will keep it for now (at least until merge done).
+
+        mThreadloopExecutor.process();
     }
+    mThreadloopExecutor.process(); // process any remaining deferred actions.
+    // deferred actions after this point are ignored.
 
     threadLoop_exit();
 
@@ -5068,7 +5145,6 @@
         // mPipeSink below
         // mNormalSink below
 {
-    setMasterBalance(afThreadCallback->getMasterBalance_l());
     ALOGV("MixerThread() id=%d type=%d", id, type);
     ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
             "mFrameCount=%zu, mNormalFrameCount=%zu",
@@ -5080,6 +5156,8 @@
         // The Duplicating thread uses the AudioMixer and delivers data to OutputTracks
         // (downstream MixerThreads) in DuplicatingThread::threadLoop_write().
         // Do not create or use mFastMixer, mOutputSink, mPipeSink, or mNormalSink.
+        // Balance is *not* set in the DuplicatingThread here (or from AudioFlinger),
+        // as the downstream MixerThreads implement it.
         return;
     }
     // create an NBAIO sink for the HAL output stream, and negotiate
@@ -5175,7 +5253,7 @@
                                                     // audio to FastMixer
         fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
         fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
-        fastTrack->mHapticScale = {/*level=*/os::HapticLevel::NONE };
+        fastTrack->mHapticScale = os::HapticScale::none();
         fastTrack->mHapticMaxAmplitude = NAN;
         fastTrack->mGeneration++;
         state->mFastTracksGen++;
@@ -5198,7 +5276,10 @@
         mFastMixerNBLogWriter = afThreadCallback->newWriter_l(kFastMixerLogSize, "FastMixer");
         state->mNBLogWriter = mFastMixerNBLogWriter.get();
         sq->end();
-        sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+        {
+            audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastMixer->getTid());
+            sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+        }
 
         NBLog::thread_info_t info;
         info.id = mId;
@@ -5239,6 +5320,9 @@
         mNormalSink = initFastMixer ? mPipeSink : mOutputSink;
         break;
     }
+    // setMasterBalance needs to be called after the FastMixer
+    // (if any) is set up, in order to deliver the balance settings to it.
+    setMasterBalance(afThreadCallback->getMasterBalance_l());
 }
 
 MixerThread::~MixerThread()
@@ -5254,8 +5338,11 @@
         }
         state->mCommand = FastMixerState::EXIT;
         sq->end();
-        sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
-        mFastMixer->join();
+        {
+            audio_utils::mutex::scoped_join_wait_check queueWaitCheck(mFastMixer->getTid());
+            sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+            mFastMixer->join();
+        }
         // Though the fast mixer thread has exited, it's state queue is still valid.
         // We'll use that extract the final state which contains one remaining fast track
         // corresponding to our sub-mix.
@@ -5335,7 +5422,10 @@
                 FastThreadDumpState::kSamplingNforLowRamDevice : FastThreadDumpState::kSamplingN);
 #endif
             sq->end();
-            sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+            {
+                audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastMixer->getTid());
+                sq->push(FastMixerStateQueue::BLOCK_UNTIL_PUSHED);
+            }
             if (kUseFastMixer == FastMixer_Dynamic) {
                 mNormalSink = mPipeSink;
             }
@@ -5368,7 +5458,10 @@
             mFastMixerFutex = 0;
             sq->end();
             // BLOCK_UNTIL_PUSHED would be insufficient, as we need it to stop doing I/O now
-            sq->push(FastMixerStateQueue::BLOCK_UNTIL_ACKED);
+            {
+                audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastMixer->getTid());
+                sq->push(FastMixerStateQueue::BLOCK_UNTIL_ACKED);
+            }
             if (kUseFastMixer == FastMixer_Dynamic) {
                 mNormalSink = mOutputSink;
             }
@@ -5424,11 +5517,15 @@
     broadcast_l();
 }
 
-void PlaybackThread::onAsyncError()
+void PlaybackThread::onAsyncError(bool isHardError)
 {
+    auto allTrackPortIds = getTrackPortIds();
     for (int i = AUDIO_STREAM_SYSTEM; i < (int)AUDIO_STREAM_CNT; i++) {
         invalidateTracks((audio_stream_type_t)i);
     }
+    if (isHardError) {
+        mAfThreadCallback->onHardError(allTrackPortIds);
+    }
 }
 
 void MixerThread::threadLoop_mix()
@@ -5742,12 +5839,19 @@
                 }
                 sp<AudioTrackServerProxy> proxy = track->audioTrackServerProxy();
                 float volume;
-                if (track->isPlaybackRestricted() || mStreamTypes[track->streamType()].mute) {
-                    volume = 0.f;
+                if (!audioserver_flags::portid_volume_management()) {
+                    if (track->isPlaybackRestricted() || mStreamTypes[track->streamType()].mute) {
+                        volume = 0.f;
+                    } else {
+                        volume = masterVolume * mStreamTypes[track->streamType()].volume;
+                    }
                 } else {
-                    volume = masterVolume * mStreamTypes[track->streamType()].volume;
+                    if (track->isPlaybackRestricted()) {
+                        volume = 0.f;
+                    } else {
+                        volume = masterVolume * track->getPortVolume();
+                    }
                 }
-
                 handleVoipVolume_l(&volume);
 
                 // cache the combined master volume and stream type volume for fast mixer; this
@@ -5759,18 +5863,31 @@
                 gain_minifloat_packed_t vlr = proxy->getVolumeLR();
                 float vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
                 float vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
-
-                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                    /*muteState=*/{masterVolume == 0.f,
-                                   mStreamTypes[track->streamType()].volume == 0.f,
-                                   mStreamTypes[track->streamType()].mute,
-                                   track->isPlaybackRestricted(),
-                                   vlf == 0.f && vrf == 0.f,
-                                   vh == 0.f});
-
+                if (!audioserver_flags::portid_volume_management()) {
+                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                            /*muteState=*/{masterVolume == 0.f,
+                                           mStreamTypes[track->streamType()].volume == 0.f,
+                                           mStreamTypes[track->streamType()].mute,
+                                           track->isPlaybackRestricted(),
+                                           vlf == 0.f && vrf == 0.f,
+                                           vh == 0.f});
+                } else {
+                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                            /*muteState=*/{masterVolume == 0.f,
+                                           track->getPortVolume() == 0.f,
+                                           /* muteFromStreamMuted= */ false,
+                                           track->isPlaybackRestricted(),
+                                           vlf == 0.f && vrf == 0.f,
+                                           vh == 0.f});
+                }
                 vlf *= volume;
                 vrf *= volume;
 
+                if (track->getInternalMute()) {
+                    vlf = 0.f;
+                    vrf = 0.f;
+                }
+
                 track->setFinalVolume(vlf, vrf);
                 ++fastTracks;
             } else {
@@ -5913,16 +6030,22 @@
             uint32_t vl, vr;       // in U8.24 integer format
             float vlf, vrf, vaf;   // in [0.0, 1.0] float format
             // read original volumes with volume control
-            float v = masterVolume * mStreamTypes[track->streamType()].volume;
             // Always fetch volumeshaper volume to ensure state is updated.
             const sp<AudioTrackServerProxy> proxy = track->audioTrackServerProxy();
             const float vh = track->getVolumeHandler()->getVolume(
                     track->audioTrackServerProxy()->framesReleased()).first;
-
-            if (mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
-                v = 0;
+            float v;
+            if (!audioserver_flags::portid_volume_management()) {
+                v = masterVolume * mStreamTypes[track->streamType()].volume;
+                if (mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
+                    v = 0;
+                }
+            } else {
+                v = masterVolume * track->getPortVolume();
+                if (track->isPlaybackRestricted()) {
+                    v = 0;
+                }
             }
-
             handleVoipVolume_l(&v);
 
             if (track->isPausing()) {
@@ -5942,15 +6065,23 @@
                     ALOGV("Track right volume out of range: %.3g", vrf);
                     vrf = GAIN_FLOAT_UNITY;
                 }
-
-                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                    /*muteState=*/{masterVolume == 0.f,
-                                   mStreamTypes[track->streamType()].volume == 0.f,
-                                   mStreamTypes[track->streamType()].mute,
-                                   track->isPlaybackRestricted(),
-                                   vlf == 0.f && vrf == 0.f,
-                                   vh == 0.f});
-
+                if (!audioserver_flags::portid_volume_management()) {
+                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                            /*muteState=*/{masterVolume == 0.f,
+                                           mStreamTypes[track->streamType()].volume == 0.f,
+                                           mStreamTypes[track->streamType()].mute,
+                                           track->isPlaybackRestricted(),
+                                           vlf == 0.f && vrf == 0.f,
+                                           vh == 0.f});
+                } else {
+                    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                            /*muteState=*/{masterVolume == 0.f,
+                                           track->getPortVolume() == 0.f,
+                                           /* muteFromStreamMuted= */ false,
+                                           track->isPlaybackRestricted(),
+                                           vlf == 0.f && vrf == 0.f,
+                                           vh == 0.f});
+                }
                 // now apply the master volume and stream type volume and shaper volume
                 vlf *= v * vh;
                 vrf *= v * vh;
@@ -5970,7 +6101,12 @@
                 vaf = v * sendLevel * (1. / MAX_GAIN_INT);
             }
 
-            track->setFinalVolume(vrf, vlf);
+            if (track->getInternalMute()) {
+                vrf = 0.f;
+                vlf = 0.f;
+            }
+
+            track->setFinalVolume(vlf, vrf);
 
             // Delegate volume control to effect in track effect chain if needed
             if (chain != 0 && chain->setVolume(&vl, &vr)) {
@@ -6159,8 +6295,8 @@
                 // No buffers for this track. Give it a few chances to
                 // fill a buffer, then remove it from active list.
                 if (--(track->retryCount()) <= 0) {
-                    ALOGI("BUFFER TIMEOUT: remove(%d) from active list on thread %p",
-                            trackId, this);
+                    ALOGI("%s BUFFER TIMEOUT: remove track(%d) from active list due to underrun"
+                          " on thread %d", __func__, trackId, mId);
                     tracksToRemove->add(track);
                     // indicate to client process that the track was disabled because of underrun;
                     // it will then automatically call start() when data is available
@@ -6221,7 +6357,10 @@
         //
         // This occurs with BT suspend when we idle the FastMixer with
         // active tracks, which may be added or removed.
-        sq->push(coldIdle ? FastMixerStateQueue::BLOCK_NEVER : block);
+        {
+            audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastMixer->getTid());
+            sq->push(coldIdle ? FastMixerStateQueue::BLOCK_NEVER : block);
+        }
     }
 #ifdef AUDIO_WATCHDOG
     if (pauseAudioWatchdog && mAudioWatchdog != 0) {
@@ -6671,34 +6810,64 @@
 
     const bool clientVolumeMute = (left == 0.f && right == 0.f);
 
-    if (mMasterMute || mStreamTypes[track->streamType()].mute || track->isPlaybackRestricted()) {
-        left = right = 0;
-    } else {
-        float typeVolume = mStreamTypes[track->streamType()].volume;
-        const float v = mMasterVolume * typeVolume * shaperVolume;
+    if (!audioserver_flags::portid_volume_management()) {
+        if (mMasterMute || mStreamTypes[track->streamType()].mute ||
+            track->isPlaybackRestricted()) {
+            left = right = 0;
+        } else {
+            float typeVolume = mStreamTypes[track->streamType()].volume;
+            const float v = mMasterVolume * typeVolume * shaperVolume;
 
-        if (left > GAIN_FLOAT_UNITY) {
-            left = GAIN_FLOAT_UNITY;
-        }
-        if (right > GAIN_FLOAT_UNITY) {
-            right = GAIN_FLOAT_UNITY;
-        }
-        left *= v;
-        right *= v;
-        if (mAfThreadCallback->getMode() != AUDIO_MODE_IN_COMMUNICATION
+            if (left > GAIN_FLOAT_UNITY) {
+                left = GAIN_FLOAT_UNITY;
+            }
+            if (right > GAIN_FLOAT_UNITY) {
+                right = GAIN_FLOAT_UNITY;
+            }
+            left *= v;
+            right *= v;
+            if (mAfThreadCallback->getMode() != AUDIO_MODE_IN_COMMUNICATION
                 || audio_channel_count_from_out_mask(mChannelMask) > 1) {
-            left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
-            right *= mMasterBalanceRight;
+                left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
+                right *= mMasterBalanceRight;
+            }
         }
-    }
+        track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                /*muteState=*/{mMasterMute,
+                               mStreamTypes[track->streamType()].volume == 0.f,
+                               mStreamTypes[track->streamType()].mute,
+                               track->isPlaybackRestricted(),
+                               clientVolumeMute,
+                               shaperVolume == 0.f});
+    } else {
+        if (mMasterMute || track->isPlaybackRestricted()) {
+            left = right = 0;
+        } else {
+            float typeVolume = track->getPortVolume();
+            const float v = mMasterVolume * typeVolume * shaperVolume;
 
-    track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-        /*muteState=*/{mMasterMute,
-                       mStreamTypes[track->streamType()].volume == 0.f,
-                       mStreamTypes[track->streamType()].mute,
-                       track->isPlaybackRestricted(),
-                       clientVolumeMute,
-                       shaperVolume == 0.f});
+            if (left > GAIN_FLOAT_UNITY) {
+                left = GAIN_FLOAT_UNITY;
+            }
+            if (right > GAIN_FLOAT_UNITY) {
+                right = GAIN_FLOAT_UNITY;
+            }
+            left *= v;
+            right *= v;
+            if (mAfThreadCallback->getMode() != AUDIO_MODE_IN_COMMUNICATION
+                || audio_channel_count_from_out_mask(mChannelMask) > 1) {
+                left *= mMasterBalanceLeft; // DirectOutputThread balance applied as track volume
+                right *= mMasterBalanceRight;
+            }
+        }
+        track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                /*muteState=*/{mMasterMute,
+                               track->getPortVolume() == 0.f,
+                               /* muteFromStreamMuted= */ false,
+                               track->isPlaybackRestricted(),
+                               clientVolumeMute,
+                               shaperVolume == 0.f});
+    }
 
     if (lastTrack) {
         track->setFinalVolume(left, right);
@@ -6917,7 +7086,8 @@
                     if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
                         track->retryCount() = kMaxTrackRetriesOffload;
                     } else {
-                        ALOGV("BUFFER TIMEOUT: remove track(%d) from active list", trackId);
+                        ALOGI("%s BUFFER TIMEOUT: remove track(%d) from active list due to"
+                              " underrun on thread %d", __func__, trackId, mId);
                         tracksToRemove->add(track);
                         // indicate to client process that the track was disabled because of
                         // underrun; it will then automatically call start() when data is available
@@ -7037,16 +7207,20 @@
 {
     bool trackPaused = false;
     bool trackStopped = false;
+    bool trackDisabled = false;
 
-    // do not put the HAL in standby when paused. AwesomePlayer clear the offloaded AudioTrack
+    // do not put the HAL in standby when paused. NuPlayer clears the offloaded AudioTrack
     // after a timeout and we will enter standby then.
+    // On offload threads, do not enter standby if the main track is still underrunning.
     if (mTracks.size() > 0) {
-        trackPaused = mTracks[mTracks.size() - 1]->isPaused();
-        trackStopped = mTracks[mTracks.size() - 1]->isStopped() ||
-                           mTracks[mTracks.size() - 1]->state() == IAfTrackBase::IDLE;
+        const auto& mainTrack = mTracks[mTracks.size() - 1];
+
+        trackPaused = mainTrack->isPaused();
+        trackStopped = mainTrack->isStopped() || mainTrack->state() == IAfTrackBase::IDLE;
+        trackDisabled = (mType == OFFLOAD) && mainTrack->isDisabled();
     }
 
-    return !mStandby && !(trackPaused || (mHwPaused && !trackStopped));
+    return !mStandby && !(trackPaused || (mHwPaused && !trackStopped) || trackDisabled);
 }
 
 // checkForNewParameter_l() must be called with ThreadBase::mutex() held
@@ -7095,7 +7269,7 @@
 uint32_t DirectOutputThread::activeSleepTimeUs() const
 {
     uint32_t time;
-    if (audio_has_proportional_frames(mFormat)) {
+    if (audio_has_proportional_frames(mFormat) && mType != OFFLOAD) {
         time = PlaybackThread::activeSleepTimeUs();
     } else {
         time = kDirectMinSleepTimeUs;
@@ -7106,7 +7280,7 @@
 uint32_t DirectOutputThread::idleSleepTimeUs() const
 {
     uint32_t time;
-    if (audio_has_proportional_frames(mFormat)) {
+    if (audio_has_proportional_frames(mFormat) && mType != OFFLOAD) {
         time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000) / 2;
     } else {
         time = kDirectMinSleepTimeUs;
@@ -7117,7 +7291,7 @@
 uint32_t DirectOutputThread::suspendSleepTimeUs() const
 {
     uint32_t time;
-    if (audio_has_proportional_frames(mFormat)) {
+    if (audio_has_proportional_frames(mFormat) && mType != OFFLOAD) {
         time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000);
     } else {
         time = kDirectMinSleepTimeUs;
@@ -7134,7 +7308,7 @@
     // no delay on outputs with HW A/V sync
     if (usesHwAvSync()) {
         mStandbyDelayNs = 0;
-    } else if ((mType == OFFLOAD) && !audio_has_proportional_frames(mFormat)) {
+    } else if (mType == OFFLOAD) {
         mStandbyDelayNs = kOffloadStandbyDelayNs;
     } else {
         mStandbyDelayNs = microseconds(mActiveSleepTimeUs*2);
@@ -7170,7 +7344,7 @@
         mPlaybackThread(playbackThread),
         mWriteAckSequence(0),
         mDrainSequence(0),
-        mAsyncError(false)
+        mAsyncError(ASYNC_ERROR_NONE)
 {
 }
 
@@ -7184,7 +7358,7 @@
     while (!exitPending()) {
         uint32_t writeAckSequence;
         uint32_t drainSequence;
-        bool asyncError;
+        AsyncError asyncError;
 
         {
             audio_utils::unique_lock _l(mutex());
@@ -7205,7 +7379,7 @@
             drainSequence = mDrainSequence;
             mDrainSequence &= ~1;
             asyncError = mAsyncError;
-            mAsyncError = false;
+            mAsyncError = ASYNC_ERROR_NONE;
         }
         {
             const sp<PlaybackThread> playbackThread = mPlaybackThread.promote();
@@ -7216,8 +7390,8 @@
                 if (drainSequence & 1) {
                     playbackThread->resetDraining(drainSequence >> 1);
                 }
-                if (asyncError) {
-                    playbackThread->onAsyncError();
+                if (asyncError != ASYNC_ERROR_NONE) {
+                    playbackThread->onAsyncError(asyncError == ASYNC_ERROR_HARD);
                 }
             }
         }
@@ -7267,10 +7441,10 @@
     }
 }
 
-void AsyncCallbackThread::setAsyncError()
+void AsyncCallbackThread::setAsyncError(bool isHardError)
 {
     audio_utils::lock_guard _l(mutex());
-    mAsyncError = true;
+    mAsyncError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
     mWaitWorkCV.notify_one();
 }
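setAsyncError() latches the most severe pending error under the callback thread's mutex; the callback loop consumes and clears it while still holding the lock, then acts on it only after the lock is dropped (the onAsyncError(asyncError == ASYNC_ERROR_HARD) call above). A minimal standalone sketch of that latch-and-consume pattern, with illustrative names rather than the actual AudioFlinger classes:

    #include <condition_variable>
    #include <mutex>

    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };

    class AsyncErrorLatch {
    public:
        void set(bool isHardError) {                  // producer: HAL callback context
            std::lock_guard<std::mutex> lg(mMutex);
            mError = isHardError ? ASYNC_ERROR_HARD : ASYNC_ERROR_SOFT;
            mCv.notify_one();
        }
        AsyncError consume() {                        // consumer: callback thread loop
            std::unique_lock<std::mutex> ul(mMutex);
            mCv.wait(ul, [this] { return mError != ASYNC_ERROR_NONE; });
            const AsyncError e = mError;
            mError = ASYNC_ERROR_NONE;                // clear while still holding the lock
            return e;                                 // caller acts on 'e' with no lock held
        }
    private:
        std::mutex mMutex;
        std::condition_variable mCv;
        AsyncError mError = ASYNC_ERROR_NONE;
    };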
 
@@ -7514,8 +7688,8 @@
                     if (isTimestampAdvancing) { // HAL is still playing audio, give us more time.
                         track->retryCount() = kMaxTrackRetriesOffload;
                     } else {
-                        ALOGV("OffloadThread: BUFFER TIMEOUT: remove track(%d) from active list",
-                                track->id());
+                        ALOGI("%s BUFFER TIMEOUT: remove track(%d) from active list due to"
+                              " underrun on thread %d", __func__, track->id(), mId);
                         tracksToRemove->add(track);
                         // tell client process that the track was disabled because of underrun;
                         // it will then automatically call start() when data is available
@@ -7718,6 +7892,9 @@
         audio_utils::lock_guard l(mutex());
         localTracks = std::move(mOutputTracks);
         mOutputTracks.clear();
+        for (size_t i = 0; i < localTracks.size(); ++i) {
+            localTracks[i]->destroy();
+        }
     }
     localTracks.clear();
     outputTracks.clear();
@@ -7790,7 +7967,9 @@
         ALOGE("addOutputTrack() initCheck failed %d", status);
         return;
     }
-    thread->setStreamVolume(AUDIO_STREAM_PATCH, 1.0f);
+    if (!audioserver_flags::portid_volume_management()) {
+        thread->setStreamVolume(AUDIO_STREAM_PATCH, 1.0f);
+    }
     mOutputTracks.add(outputTrack);
     ALOGV("addOutputTrack() track %p, on thread %p", outputTrack.get(), thread);
     updateWaitTime_l();
@@ -8157,8 +8336,10 @@
                 afThreadCallback->newWriter_l(kFastCaptureLogSize, "FastCapture");
         state->mNBLogWriter = mFastCaptureNBLogWriter.get();
         sq->end();
-        sq->push(FastCaptureStateQueue::BLOCK_UNTIL_PUSHED);
-
+        {
+            audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastCapture->getTid());
+            sq->push(FastCaptureStateQueue::BLOCK_UNTIL_PUSHED);
+        }
         // start the fast capture
         mFastCapture->run("FastCapture", ANDROID_PRIORITY_URGENT_AUDIO);
         pid_t tid = mFastCapture->getTid();
@@ -8192,8 +8373,11 @@
         }
         state->mCommand = FastCaptureState::EXIT;
         sq->end();
-        sq->push(FastCaptureStateQueue::BLOCK_UNTIL_PUSHED);
-        mFastCapture->join();
+        {
+            audio_utils::mutex::scoped_join_wait_check queueWaitCheck(mFastCapture->getTid());
+            sq->push(FastCaptureStateQueue::BLOCK_UNTIL_PUSHED);
+            mFastCapture->join();
+        }
         mFastCapture.clear();
     }
     mAfThreadCallback->unregisterWriter(mFastCaptureNBLogWriter);
@@ -8242,6 +8426,7 @@
     for (int64_t loopCount = 0;; ++loopCount) {  // loopCount used for statistics tracking
         // Note: these sp<> are released at the end of the for loop outside of the mutex() lock.
         sp<IAfRecordTrack> activeTrack;
+        std::vector<sp<IAfRecordTrack>> oldActiveTracks;
         Vector<sp<IAfEffectChain>> effectChains;
 
         // activeTracks accumulates a copy of a subset of mActiveTracks
@@ -8291,7 +8476,9 @@
             bool doBroadcast = false;
             bool allStopped = true;
             for (size_t i = 0; i < size; ) {
-
+                if (activeTrack) {  // ensure track release is outside lock.
+                    oldActiveTracks.emplace_back(std::move(activeTrack));
+                }
                 activeTrack = mActiveTracks[i];
                 if (activeTrack->isTerminated()) {
                     if (activeTrack->isFastTrack()) {
@@ -8627,6 +8814,9 @@
 
         // loop over each active track
         for (size_t i = 0; i < size; i++) {
+            if (activeTrack) {  // ensure track release is outside lock.
+                oldActiveTracks.emplace_back(std::move(activeTrack));
+            }
             activeTrack = activeTracks[i];
 
             // skip fast tracks, as those are handled directly by FastCapture
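In the hunks above, the previously held activeTrack is stashed into oldActiveTracks before being reassigned, so the strong reference (and any track destructor it may trigger) is released only after the thread mutex has been dropped for that loop iteration. A generic sketch of the same deferred-release idea, using standard types rather than the AudioFlinger ones:

    #include <memory>
    #include <mutex>
    #include <vector>

    // Sketch: defer destruction of shared objects until the lock is released.
    template <typename T>
    void processActive(std::mutex& m, const std::vector<std::shared_ptr<T>>& active) {
        std::shared_ptr<T> current;
        std::vector<std::shared_ptr<T>> released;   // destroyed after the lock scope
        {
            std::lock_guard<std::mutex> lg(m);
            for (const auto& t : active) {
                if (current) {
                    released.emplace_back(std::move(current));  // keep the ref alive for now
                }
                current = t;
                // ... work on 'current' under the lock ...
            }
        }
        // 'released' and 'current' go out of scope here, outside the lock,
        // so any destructors run without the mutex held.
    }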
@@ -8770,11 +8960,14 @@
             mIoJitterMs.add(jitterMs);
             mProcessTimeMs.add(processMs);
         }
+        mThreadloopExecutor.process();
         // update timing info.
         mLastIoBeginNs = lastIoBeginNs;
         mLastIoEndNs = lastIoEndNs;
         lastLoopCountRead = loopCount;
     }
+    mThreadloopExecutor.process(); // process any remaining deferred actions.
+    // deferred actions after this point are ignored.
 
     standbyIfNotAlreadyInStandby();
 
@@ -8817,7 +9010,11 @@
             mFastCaptureFutex = 0;
             sq->end();
             // BLOCK_UNTIL_PUSHED would be insufficient, as we need it to stop doing I/O now
-            sq->push(FastCaptureStateQueue::BLOCK_UNTIL_ACKED);
+            {
+                audio_utils::mutex::scoped_queue_wait_check queueWaitCheck(mFastCapture->getTid());
+                sq->push(FastCaptureStateQueue::BLOCK_UNTIL_ACKED);
+            }
+
 #if 0
             if (kUseFastCapture == FastCapture_Dynamic) {
                 // FIXME
@@ -9167,7 +9364,7 @@
     // This is needed for proper patchRecord peer release.
     while (recordTrack->state() == IAfTrackBase::PAUSING && !recordTrack->isInvalid()) {
         mWaitWorkCV.notify_all(); // signal thread to stop
-        mStartStopCV.wait(_l);
+        mStartStopCV.wait(_l, getTid());
     }
 
     if (recordTrack->state() == IAfTrackBase::PAUSED) { // successful stop
@@ -10248,11 +10445,27 @@
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
 
     audio_io_handle_t io = mId;
-    const AttributionSourceState adjAttributionSource = afutils::checkAttributionSourcePackage(
-            client.attributionSource);
+    AttributionSourceState adjAttributionSource;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                client.attributionSource);
+    } else {
+        // TODO(b/342475009) validate in oboeservice, and plumb downwards
+        auto validatedRes = ValidatedAttributionSourceState::createFromTrustedUidNoPackage(
+                    client.attributionSource,
+                    mAfThreadCallback->getPermissionProvider()
+                );
+        if (!validatedRes.has_value()) {
+            ALOGE("MMAP client package validation fail: %s",
+                    validatedRes.error().toString8().c_str());
+            return aidl_utils::statusTFromBinderStatus(validatedRes.error());
+        }
+        adjAttributionSource = std::move(validatedRes.value()).unwrapInto();
+    }
 
     const auto localSessionId = mSessionId;
     auto localAttr = mAttr;
+    float volume = 0.0f;
     if (isOutput()) {
         audio_config_t config = AUDIO_CONFIG_INITIALIZER;
         config.sample_rate = mSampleRate;
@@ -10276,7 +10489,8 @@
                                             &portId,
                                             &secondaryOutputs,
                                             &isSpatialized,
-                                            &isBitPerfect);
+                                            &isBitPerfect,
+                                            &volume);
         mutex().lock();
         mAttr = localAttr;
         ALOGD_IF(!secondaryOutputs.empty(),
@@ -10345,7 +10559,8 @@
             this, attr == nullptr ? mAttr : *attr, mSampleRate, mFormat,
                                         mChannelMask, mSessionId, isOutput(),
                                         client.attributionSource,
-                                        IPCThreadState::self()->getCallingPid(), portId);
+                                        IPCThreadState::self()->getCallingPid(), portId,
+                                        volume);
     if (!isOutput()) {
         track->setSilenced_l(isClientSilenced_l(portId));
     }
@@ -10560,7 +10775,10 @@
         unlockEffectChains(effectChains);
         // Effect chains will be actually deleted here if they were removed from
         // mEffectChains list during mixing or effects processing
+        mThreadloopExecutor.process();
     }
+    mThreadloopExecutor.process(); // process any remaining deferred actions.
+    // deferred actions after this point are ignored.
 
     threadLoop_exit();
 
@@ -10927,18 +11145,18 @@
     mChannelCount = audio_channel_count_from_out_mask(mChannelMask);
     mMasterVolume = afThreadCallback->masterVolume_l();
     mMasterMute = afThreadCallback->masterMute_l();
-
-    for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
-        const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
-        mStreamTypes[stream].volume = 0.0f;
-        mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+    if (!audioserver_flags::portid_volume_management()) {
+        for (int i = AUDIO_STREAM_MIN; i < AUDIO_STREAM_FOR_POLICY_CNT; ++i) {
+            const audio_stream_type_t stream{static_cast<audio_stream_type_t>(i)};
+            mStreamTypes[stream].volume = 0.0f;
+            mStreamTypes[stream].mute = mAfThreadCallback->streamMute_l(stream);
+        }
+        // Audio patch and call assistant volume are always max
+        mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
+        mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
+        mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
+        mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
     }
-    // Audio patch and call assistant volume are always max
-    mStreamTypes[AUDIO_STREAM_PATCH].volume = 1.0f;
-    mStreamTypes[AUDIO_STREAM_PATCH].mute = false;
-    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].volume = 1.0f;
-    mStreamTypes[AUDIO_STREAM_CALL_ASSISTANT].mute = false;
-
     if (mAudioHwDev) {
         if (mAudioHwDev->canSetMasterVolume()) {
             mMasterVolume = 1.0;
@@ -11017,6 +11235,21 @@
     }
 }
 
+status_t MmapPlaybackThread::setPortsVolume(
+        const std::vector<audio_port_handle_t>& portIds, float volume) {
+    audio_utils::lock_guard _l(mutex());
+    for (const auto& portId : portIds) {
+        for (const sp<IAfMmapTrack>& track : mActiveTracks) {
+            if (portId == track->portId()) {
+                track->setPortVolume(volume);
+                break;
+            }
+        }
+    }
+    broadcast_l();
+    return NO_ERROR;
+}
+
 void MmapPlaybackThread::invalidateTracks(audio_stream_type_t streamType)
 {
     audio_utils::lock_guard _l(mutex());
@@ -11050,14 +11283,26 @@
 void MmapPlaybackThread::processVolume_l()
 NO_THREAD_SAFETY_ANALYSIS // access of track->processMuteEvent_l
 {
-    float volume;
-
-    if (mMasterMute || streamMuted_l()) {
-        volume = 0;
+    float volume = 0;
+    if (!audioserver_flags::portid_volume_management()) {
+        if (mMasterMute || streamMuted_l()) {
+            volume = 0;
+        } else {
+            volume = mMasterVolume * streamVolume_l();
+        }
     } else {
-        volume = mMasterVolume * streamVolume_l();
+        if (mMasterMute) {
+            volume = 0;
+        } else {
+            // All mmap tracks are declared with the same audio attributes to the audio policy
+            // manager. Hence, they follow the same routing / volume group. Any change of volume
+            // will be broadcast to all tracks. Thus, arbitrarily take the first track's volume.
+            size_t numtracks = mActiveTracks.size();
+            if (numtracks) {
+                volume = mMasterVolume * mActiveTracks[0]->getPortVolume();
+            }
+        }
     }
-
     if (volume != mHalVolFloat) {
         // Convert volumes from float to 8.24
         uint32_t vol = (uint32_t)(volume * (1 << 24));
@@ -11090,14 +11335,25 @@
         }
         for (const sp<IAfMmapTrack>& track : mActiveTracks) {
             track->setMetadataHasChanged();
-            track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
-                /*muteState=*/{mMasterMute,
-                               streamVolume_l() == 0.f,
-                               streamMuted_l(),
-                               // TODO(b/241533526): adjust logic to include mute from AppOps
-                               false /*muteFromPlaybackRestricted*/,
-                               false /*muteFromClientVolume*/,
-                               false /*muteFromVolumeShaper*/});
+            if (!audioserver_flags::portid_volume_management()) {
+                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                        /*muteState=*/{mMasterMute,
+                        streamVolume_l() == 0.f,
+                        streamMuted_l(),
+                        // TODO(b/241533526): adjust logic to include mute from AppOps
+                        false /*muteFromPlaybackRestricted*/,
+                        false /*muteFromClientVolume*/,
+                        false /*muteFromVolumeShaper*/});
+            } else {
+                track->processMuteEvent_l(mAfThreadCallback->getOrCreateAudioManager(),
+                    /*muteState=*/{mMasterMute,
+                                   track->getPortVolume() == 0.f,
+                                   /* muteFromStreamMuted= */ false,
+                                   // TODO(b/241533526): adjust logic to include mute from AppOps
+                                   false /*muteFromPlaybackRestricted*/,
+                                   false /*muteFromClientVolume*/,
+                                   false /*muteFromVolumeShaper*/});
+            }
         }
     }
 }
@@ -11204,9 +11460,13 @@
 void MmapPlaybackThread::dumpInternals_l(int fd, const Vector<String16>& args)
 {
     MmapThread::dumpInternals_l(fd, args);
-
-    dprintf(fd, "  Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d\n",
-            mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
+    if (!audioserver_flags::portid_volume_management()) {
+        dprintf(fd, "  Stream type: %d Stream volume: %f HAL volume: %f Stream mute %d",
+                mStreamType, streamVolume_l(), mHalVolFloat, streamMuted_l());
+    } else {
+        dprintf(fd, "  HAL volume: %f", mHalVolFloat);
+    }
+    dprintf(fd, "\n");
     dprintf(fd, "  Master volume: %f Master mute %d\n", mMasterVolume, mMasterMute);
 }
 
@@ -11345,14 +11605,15 @@
     // If there is only one active track and it is bit-perfect, enable tee buffer.
     float volumeLeft = 1.0f;
     float volumeRight = 1.0f;
-    if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
-        const int trackId = mActiveTracks[0]->id();
+    if (sp<IAfTrack> bitPerfectTrack = getTrackToStreamBitPerfectly_l();
+        bitPerfectTrack != nullptr) {
+        const int trackId = bitPerfectTrack->id();
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER, (void *)mSinkBuffer);
         mAudioMixer->setParameter(
                     trackId, AudioMixer::TRACK, AudioMixer::TEE_BUFFER_FRAME_COUNT,
                     (void *)(uintptr_t)mNormalFrameCount);
-        mActiveTracks[0]->getFinalVolume(&volumeLeft, &volumeRight);
+        bitPerfectTrack->getFinalVolume(&volumeLeft, &volumeRight);
         mIsBitPerfect = true;
     } else {
         mIsBitPerfect = false;
@@ -11377,4 +11638,45 @@
     mHasDataCopiedToSinkBuffer = mIsBitPerfect;
 }
 
+void BitPerfectThread::setTracksInternalMute(
+        std::map<audio_port_handle_t, bool>* tracksInternalMute) {
+    audio_utils::lock_guard _l(mutex());
+    for (auto& track : mTracks) {
+        if (auto it = tracksInternalMute->find(track->portId()); it != tracksInternalMute->end()) {
+            track->setInternalMute(it->second);
+            tracksInternalMute->erase(it);
+        }
+    }
+}
+
+sp<IAfTrack> BitPerfectThread::getTrackToStreamBitPerfectly_l() {
+    if (com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        sp<IAfTrack> bitPerfectTrack = nullptr;
+        bool allOtherTracksMuted = true;
+        // Return the bit perfect track if all other tracks are muted
+        for (const auto& track : mActiveTracks) {
+            if (track->isBitPerfect()) {
+                if (track->getInternalMute()) {
+                    // There can only be one bit-perfect client active. If it is muted internally,
+                    // there is no need to stream bit-perfectly.
+                    break;
+                }
+                bitPerfectTrack = track;
+            } else if (track->getFinalVolume() != 0.f) {
+                allOtherTracksMuted = false;
+                if (bitPerfectTrack != nullptr) {
+                    break;
+                }
+            }
+        }
+        return allOtherTracksMuted ? bitPerfectTrack : nullptr;
+    } else {
+        if (mActiveTracks.size() == 1 && mActiveTracks[0]->isBitPerfect()) {
+            return mActiveTracks[0];
+        }
+    }
+    return nullptr;
+}
+
 } // namespace android
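getTrackToStreamBitPerfectly_l() streams bit-perfectly only when the single bit-perfect client is not internally muted and every other active track is silent; otherwise the thread falls back to mixing. The same selection rule, restated as a standalone sketch over plain structs (hypothetical types, for illustration only):

    #include <vector>

    struct TrackInfo {
        bool bitPerfect;
        bool internallyMuted;
        float finalVolume;
    };

    // Returns the index of the track to stream bit-perfectly, or -1 if mixing is required.
    int pickBitPerfectTrack(const std::vector<TrackInfo>& tracks) {
        int candidate = -1;
        for (size_t i = 0; i < tracks.size(); ++i) {
            const TrackInfo& t = tracks[i];
            if (t.bitPerfect) {
                if (t.internallyMuted) return -1;   // the only bit-perfect client is muted
                candidate = static_cast<int>(i);
            } else if (t.finalVolume != 0.f) {
                return -1;                          // an audible non-bit-perfect track forces mixing
            }
        }
        return candidate;
    }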
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 86e1894..bf37238 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -33,6 +33,7 @@
 #include <fastpath/FastMixer.h>
 #include <mediautils/Synchronization.h>
 #include <mediautils/ThreadSnapshot.h>
+#include <psh_utils/Token.h>
 #include <timing/MonotonicFrameCounter.h>
 #include <utils/Log.h>
 
@@ -379,10 +380,10 @@
         return isOutput() ? outDeviceTypes_l() : DeviceTypeSet({inDeviceType_l()});
     }
 
-    const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const final {
+    const AudioDeviceTypeAddrVector& outDeviceTypeAddrs() const final REQUIRES(mutex()) {
         return mOutDeviceTypeAddrs;
     }
-    const AudioDeviceTypeAddr& inDeviceTypeAddr() const final {
+    const AudioDeviceTypeAddr& inDeviceTypeAddr() const final REQUIRES(mutex()) {
         return mInDeviceTypeAddr;
     }
 
@@ -571,6 +572,10 @@
     void stopMelComputation_l() override
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
+    audio_utils::DeferredExecutor& getThreadloopExecutor() override {
+        return mThreadloopExecutor;
+    }
+
 protected:
 
                 // entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -618,7 +623,8 @@
      * ThreadBase_Mutex during this time.  No other mutex is held.
      */
 
-    void waitWhileThreadBusy_l(audio_utils::unique_lock& ul) final REQUIRES(mutex()) {
+    void waitWhileThreadBusy_l(audio_utils::unique_lock<audio_utils::mutex>& ul)
+            final REQUIRES(mutex()) {
         // the wait returns immediately if the predicate is satisfied.
         mThreadBusyCv.wait(ul, [&]{ return mThreadBusy == false;});
     }
@@ -721,6 +727,7 @@
                 char                    mThreadName[kThreadNameLength]; // guaranteed NUL-terminated
     sp<os::IPowerManager> mPowerManager GUARDED_BY(mutex());
     sp<IBinder> mWakeLockToken GUARDED_BY(mutex());
+    std::unique_ptr<media::psh_utils::Token> mThreadToken GUARDED_BY(mutex());
                 const sp<PMDeathRecipient> mDeathRecipient;
                 // list of suspended effects per session and per type. The first (outer) vector is
                 // keyed by session ID, the second (inner) by type UUID timeLow field
@@ -832,6 +839,12 @@
                     typename SortedVector<sp<T>>::iterator end() {
                         return mActiveTracks.end();
                     }
+                    typename SortedVector<const sp<T>>::iterator begin() const {
+                        return mActiveTracks.begin();
+                    }
+                    typename SortedVector<const sp<T>>::iterator end() const {
+                        return mActiveTracks.end();
+                    }
 
                     // Due to Binder recursion optimization, clear() and updatePowerState()
                     // cannot be called from a Binder thread because they may call back into
@@ -877,6 +890,14 @@
 
                 SimpleLog mLocalLog;  // locked internally
 
+    // mThreadloopExecutor contains deferred functors and objects (dtors) to
+    // be executed at the end of the processing period, without any
+    // mutexes held.
+    //
+    // mThreadloopExecutor is locked internally, so its methods are safe to
+    // call from any thread.
+    audio_utils::DeferredExecutor mThreadloopExecutor;
+
     private:
     void dumpBase_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
     void dumpEffectChains_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
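The mThreadloopExecutor member above collects work to run once per processing period with no thread mutexes held; TrackBase::deferRestartIfDisabled() in Tracks.cpp is one such use. A minimal sketch of a deferred executor in that spirit (the real audio_utils::DeferredExecutor API may differ; only defer() and process() are visible in this change):

    #include <functional>
    #include <mutex>
    #include <vector>

    class DeferredExecutorSketch {
    public:
        void defer(std::function<void()> f) {       // safe to call from any thread
            std::lock_guard<std::mutex> lg(mMutex);
            mDeferred.emplace_back(std::move(f));
        }
        void process() {                            // called with no thread mutexes held
            std::vector<std::function<void()>> pending;
            {
                std::lock_guard<std::mutex> lg(mMutex);
                pending.swap(mDeferred);            // take the batch under the internal lock
            }
            for (auto& f : pending) f();            // run the deferred work outside the lock
        }
    private:
        std::mutex mMutex;
        std::vector<std::function<void()>> mDeferred;
    };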
@@ -948,7 +969,7 @@
     // StreamOutHalInterfaceCallback implementation
     virtual     void        onWriteReady();
     virtual     void        onDrainReady();
-    virtual     void        onError();
+    virtual     void        onError(bool /*isHardError*/);
 
 public: // AsyncCallbackThread
                 void        resetWriteBlocked(uint32_t sequence);
@@ -960,7 +981,7 @@
     virtual bool shouldStandby_l() REQUIRES(mutex(), ThreadBase_ThreadLoop);
     virtual void onAddNewTrack_l() REQUIRES(mutex());
 public:  // AsyncCallbackThread
-                void        onAsyncError(); // error reported by AsyncCallbackThread
+                void        onAsyncError(bool isHardError); // error reported by AsyncCallbackThread
 protected:
     // StreamHalInterfaceCodecFormatCallback implementation
                 void        onCodecFormatChanged(
@@ -999,6 +1020,9 @@
     void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
     void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
     float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
+            final EXCLUDES_ThreadBase_Mutex;
+
     void setVolumeForOutput_l(float left, float right) const final;
 
     sp<IAfTrack> createTrack_l(
@@ -1023,7 +1047,8 @@
                                 const sp<media::IAudioTrackCallback>& callback,
                                 bool isSpatialized,
                                 bool isBitPerfect,
-                                audio_output_flags_t* afTrackFlags) final
+                                audio_output_flags_t* afTrackFlags,
+                                float volume) final
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
     bool isTrackActive(const sp<IAfTrack>& track) const final {
@@ -1200,6 +1225,11 @@
                     }
                     return mHalStarted;
                 }
+
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* /* tracksInternalMute */)
+            override EXCLUDES_ThreadBase_Mutex {
+        // Do nothing. It is only used by the bit-perfect thread.
+    }
 protected:
     // updated by readOutputParameters_l()
     size_t                          mNormalFrameCount;  // normal mixer and effects
@@ -1373,6 +1403,8 @@
     bool destroyTrack_l(const sp<IAfTrack>& track) final REQUIRES(mutex());
 
     void removeTrack_l(const sp<IAfTrack>& track) REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds_l() REQUIRES(mutex());
+    std::set<audio_port_handle_t> getTrackPortIds();
 
     void readOutputParameters_l() REQUIRES(mutex());
     MetadataUpdate updateMetadata_l() final REQUIRES(mutex());
@@ -1836,7 +1868,7 @@
             void        resetWriteBlocked();
             void        setDraining(uint32_t sequence);
             void        resetDraining();
-            void        setAsyncError();
+            void        setAsyncError(bool isHardError);
 
 private:
     const wp<PlaybackThread>   mPlaybackThread;
@@ -1850,7 +1882,8 @@
     uint32_t                   mDrainSequence;
     audio_utils::condition_variable mWaitWorkCV;
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAsyncCallbackThread_Mutex};
-    bool                       mAsyncError;
+    enum AsyncError { ASYNC_ERROR_NONE, ASYNC_ERROR_SOFT, ASYNC_ERROR_HARD };
+    AsyncError                 mAsyncError;
 
     audio_utils::mutex& mutex() const RETURN_CAPABILITY(audio_utils::AsyncCallbackThread_Mutex) {
         return mMutex;
@@ -2365,6 +2398,8 @@
     void setStreamVolume(audio_stream_type_t stream, float value) final EXCLUDES_ThreadBase_Mutex;
     void setStreamMute(audio_stream_type_t stream, bool muted) final EXCLUDES_ThreadBase_Mutex;
     float streamVolume(audio_stream_type_t stream) const final EXCLUDES_ThreadBase_Mutex;
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& portIds, float volume)
+            final EXCLUDES_ThreadBase_Mutex;
 
     void setMasterMute_l(bool muted) REQUIRES(mutex()) { mMasterMute = muted; }
 
@@ -2449,12 +2484,17 @@
     BitPerfectThread(const sp<IAfThreadCallback>& afThreadCallback, AudioStreamOut *output,
                      audio_io_handle_t id, bool systemReady);
 
+    void setTracksInternalMute(std::map<audio_port_handle_t, bool>* tracksInternalMute)
+            final EXCLUDES_ThreadBase_Mutex;
+
 protected:
     mixer_state prepareTracks_l(Vector<sp<IAfTrack>>* tracksToRemove) final
             REQUIRES(mutex(), ThreadBase_ThreadLoop);
     void threadLoop_mix() final REQUIRES(ThreadBase_ThreadLoop);
 
 private:
+    sp<IAfTrack> getTrackToStreamBitPerfectly_l() REQUIRES(mutex());
+
     // These variables are only accessed on the threadLoop; hence need no mutex.
     bool mIsBitPerfect GUARDED_BY(ThreadBase_ThreadLoop) = false;
     float mVolumeLeft GUARDED_BY(ThreadBase_ThreadLoop) = 0.f;
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 5708c61..cde7fc2 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -24,6 +24,7 @@
 #include <android-base/macros.h>  // DISALLOW_COPY_AND_ASSIGN
 #include <datapath/TrackMetrics.h>
 #include <mediautils/BatteryNotifier.h>
+#include <psh_utils/AudioPowerManager.h>
 
 #include <atomic>    // avoid transitive dependency
 #include <list>      // avoid transitive dependency
@@ -240,17 +241,13 @@
      * Called when a track moves to active state to record its contribution to battery usage.
      * Track state transitions should eventually be handled within the track class.
      */
-    void beginBatteryAttribution() final {
-        mBatteryStatsHolder.emplace(uid());
-    }
+    void beginBatteryAttribution() final;
 
     /**
      * Called when a track moves out of the active state to record its contribution
      * to battery usage.
      */
-    void endBatteryAttribution() final {
-        mBatteryStatsHolder.reset();
-    }
+    void endBatteryAttribution() final;
 
 protected:
     DISALLOW_COPY_AND_ASSIGN(TrackBase);
@@ -333,6 +330,9 @@
                                     // true for Track, false for RecordTrack,
                                     // this could be a track type if needed later
 
+    void deferRestartIfDisabled();
+    virtual void restartIfDisabled() {}
+
     const wp<IAfThreadBase> mThread;
     const alloc_type     mAllocType;
     /*const*/ sp<Client> mClient;   // see explanation at ~TrackBase() why not const
@@ -344,7 +344,7 @@
     size_t              mBufferSize; // size of mBuffer in bytes
     // we don't really need a lock for these
     MirroredVariable<track_state>  mState;
-    const audio_attributes_t mAttr;
+    audio_attributes_t  mAttr;
     const uint32_t      mSampleRate;    // initial sample rate only; for tracks which
                         // support dynamic rates, the current value is in control block
     const audio_format_t mFormat;
@@ -397,6 +397,7 @@
     std::atomic_flag    mChangeNotified = ATOMIC_FLAG_INIT;
     // RAII object for battery stats book-keeping
     std::optional<mediautils::BatteryStatsAudioHandle> mBatteryStatsHolder;
+    std::unique_ptr<media::psh_utils::Token> mTrackToken;
 };
 
 class PatchTrackBase : public PatchProxyBufferProvider, public virtual IAfPatchTrackBase
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 8fa4b06..a692773 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -314,6 +314,29 @@
     return NO_ERROR;
 }
 
+void TrackBase::deferRestartIfDisabled()
+{
+    const auto thread = mThread.promote();
+    if (thread == nullptr) return;
+    auto weakTrack = wp<TrackBase>::fromExisting(this);
+    thread->getThreadloopExecutor().defer([weakTrack] {
+            const auto actual = weakTrack.promote();
+            if (actual) actual->restartIfDisabled();
+        });
+}
+
+void TrackBase::beginBatteryAttribution() {
+    mBatteryStatsHolder.emplace(uid());
+    if (media::psh_utils::AudioPowerManager::enabled()) {
+        mTrackToken = media::psh_utils::createAudioTrackToken(uid());
+    }
+}
+
+void TrackBase::endBatteryAttribution() {
+    mBatteryStatsHolder.reset();
+    mTrackToken.reset();
+}
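beginBatteryAttribution()/endBatteryAttribution() now pair the battery-stats handle with an optional power-manager token, both held as RAII members, so resetting them is all that is needed to end attribution. A small sketch of that shape with placeholder types (the real BatteryStatsAudioHandle and psh_utils Token APIs are not reproduced here):

    #include <memory>
    #include <optional>

    struct StatsHandle { explicit StatsHandle(int uid) : mUid(uid) {} int mUid; };  // placeholder
    struct PowerToken {};                                                           // placeholder

    class AttributionHolder {
    public:
        void begin(int uid, bool powerManagerEnabled) {
            mStats.emplace(uid);                         // start battery attribution
            if (powerManagerEnabled) {
                mToken = std::make_unique<PowerToken>(); // optional power tracking token
            }
        }
        void end() {
            mStats.reset();                              // destructors end attribution
            mToken.reset();
        }
    private:
        std::optional<StatsHandle> mStats;
        std::unique_ptr<PowerToken> mToken;
    };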
+
 PatchTrackBase::PatchTrackBase(const sp<ClientProxy>& proxy,
         IAfThreadBase* thread, const Timeout& timeout)
     : mProxy(proxy)
@@ -704,7 +727,8 @@
         size_t frameCountToBeReady,
         float speed,
         bool isSpatialized,
-        bool isBitPerfect) {
+        bool isBitPerfect,
+        float volume) {
     return sp<Track>::make(thread,
             client,
             streamType,
@@ -725,7 +749,8 @@
             frameCountToBeReady,
             speed,
             isSpatialized,
-            isBitPerfect);
+            isBitPerfect,
+            volume);
 }
 
 // Track constructor must be called with AudioFlinger::mLock and ThreadBase::mLock held
@@ -750,7 +775,8 @@
             size_t frameCountToBeReady,
             float speed,
             bool isSpatialized,
-            bool isBitPerfect)
+            bool isBitPerfect,
+            float volume)
     :   TrackBase(thread, client, attr, sampleRate, format, channelMask, frameCount,
                   // TODO: Using unsecurePointer() has some associated security pitfalls
                   //       (see declaration for details).
@@ -786,7 +812,8 @@
     mFlags(flags),
     mSpeed(speed),
     mIsSpatialized(isSpatialized),
-    mIsBitPerfect(isBitPerfect)
+    mIsBitPerfect(isBitPerfect),
+    mVolume(volume)
 {
     // client == 0 implies sharedBuffer == 0
     ALOG_ASSERT(!(client == 0 && sharedBuffer != 0));
@@ -832,6 +859,14 @@
         thread->fastTrackAvailMask_l() &= ~(1 << i);
     }
 
+    populateUsageAndContentTypeFromStreamType();
+
+    // Audio patch and call assistant volume are always max
+    if (mAttr.usage == AUDIO_USAGE_CALL_ASSISTANT
+            || mAttr.usage == AUDIO_USAGE_VIRTUAL_SOURCE) {
+        mVolume = 1.0f;
+    }
+
     mServerLatencySupported = checkServerLatencySupported(format, flags);
 #ifdef TEE_SINK
     mTee.setId(std::string("_") + std::to_string(mThreadIoHandle)
@@ -854,6 +889,62 @@
     mTrackMetrics.logConstructor(creatorPid, uid, id(), traits, streamType);
 }
 
+// When attributes are undefined, derive default values from stream type.
+// See AudioAttributes.java, usageForStreamType() and Builder.setInternalLegacyStreamType()
+void Track::populateUsageAndContentTypeFromStreamType() {
+    if (mAttr.usage == AUDIO_USAGE_UNKNOWN) {
+        switch (mStreamType) {
+        case AUDIO_STREAM_VOICE_CALL:
+            mAttr.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_SYSTEM:
+            mAttr.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_RING:
+            mAttr.usage = AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_MUSIC:
+            mAttr.usage = AUDIO_USAGE_MEDIA;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+            break;
+        case AUDIO_STREAM_ALARM:
+            mAttr.usage = AUDIO_USAGE_ALARM;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_NOTIFICATION:
+            mAttr.usage = AUDIO_USAGE_NOTIFICATION;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_DTMF:
+            mAttr.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
+            break;
+        case AUDIO_STREAM_ACCESSIBILITY:
+            mAttr.usage = AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_ASSISTANT:
+            mAttr.usage = AUDIO_USAGE_ASSISTANT;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        case AUDIO_STREAM_REROUTING:
+        case AUDIO_STREAM_PATCH:
+            mAttr.usage = AUDIO_USAGE_VIRTUAL_SOURCE;
+            // unknown content type
+            break;
+        case AUDIO_STREAM_CALL_ASSISTANT:
+            mAttr.usage = AUDIO_USAGE_CALL_ASSISTANT;
+            mAttr.content_type = AUDIO_CONTENT_TYPE_SPEECH;
+            break;
+        default:
+            break;
+        }
+    }
+}
+
 Track::~Track()
 {
     ALOGV("%s(%d)", __func__, mId);
@@ -912,8 +1003,8 @@
     result.appendFormat("Type     Id Active Client Session Port Id S  Flags "
                         "  Format Chn mask  SRate "
                         "ST Usg CT "
-                        " G db  L dB  R dB  VS dB "
-                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect"
+                        " G db  L dB  R dB  VS dB  PortVol dB "
+                        "  Server FrmCnt  FrmRdy F Underruns  Flushed BitPerfect InternalMute"
                         "%s\n",
                         isServerLatencySupported() ? "   Latency" : "");
 }
@@ -998,8 +1089,8 @@
     result.appendFormat("%7s %6u %7u %7u %2s 0x%03X "
                         "%08X %08X %6u "
                         "%2u %3x %2x "
-                        "%5.2g %5.2g %5.2g %5.2g%c "
-                        "%08X %6zu%c %6zu %c %9u%c %7u %10s",
+                        "%5.2g %5.2g %5.2g %5.2g%c %11.2g "
+                        "%08X %6zu%c %6zu %c %9u%c %7u %10s %12s",
             active ? "yes" : "no",
             (mClient == 0) ? getpid() : mClient->pid(),
             mSessionId,
@@ -1020,6 +1111,7 @@
             20.0 * log10(float_from_gain(gain_minifloat_unpack_right(vlr))),
             20.0 * log10(vsVolume.first), // VolumeShaper(s) total volume
             vsVolume.second ? 'A' : ' ',  // if any VolumeShapers active
+            20.0 * log10(mVolume),
 
             mCblk->mServer,
             bufferSizeInFrames,
@@ -1029,7 +1121,8 @@
             mAudioTrackServerProxy->getUnderrunFrames(),
             nowInUnderrun,
             (unsigned)mAudioTrackServerProxy->framesFlushed() % 10000000,
-            isBitPerfect() ? "true" : "false"
+            isBitPerfect() ? "true" : "false",
+            getInternalMute() ? "true" : "false"
             );
 
     if (isServerLatencySupported()) {
@@ -1520,6 +1613,16 @@
     return INVALID_OPERATION;
 }
 
+void Track::setPortVolume(float volume) {
+    mVolume = volume;
+    if (mType != TYPE_PATCH) {
+        // Do not recursively propagate a PatchTrack setPortVolume to
+        // downstream PatchTracks.
+        forEachTeePatchTrack_l([volume](const auto& patchTrack) {
+                patchTrack->setPortVolume(volume); });
+    }
+}
+
 VolumeShaper::Status Track::applyVolumeShaper(
         const sp<VolumeShaper::Configuration>& configuration,
         const sp<VolumeShaper::Operation>& operation)
@@ -1575,59 +1678,6 @@
             .gain = mFinalVolume,
     };
 
-    // When attributes are undefined, derive default values from stream type.
-    // See AudioAttributes.java, usageForStreamType() and Builder.setInternalLegacyStreamType()
-    if (mAttr.usage == AUDIO_USAGE_UNKNOWN) {
-        switch (mStreamType) {
-        case AUDIO_STREAM_VOICE_CALL:
-            metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
-            break;
-        case AUDIO_STREAM_SYSTEM:
-            metadata.base.usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_RING:
-            metadata.base.usage = AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_MUSIC:
-            metadata.base.usage = AUDIO_USAGE_MEDIA;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_MUSIC;
-            break;
-        case AUDIO_STREAM_ALARM:
-            metadata.base.usage = AUDIO_USAGE_ALARM;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_NOTIFICATION:
-            metadata.base.usage = AUDIO_USAGE_NOTIFICATION;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_DTMF:
-            metadata.base.usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
-            break;
-        case AUDIO_STREAM_ACCESSIBILITY:
-            metadata.base.usage = AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
-            break;
-        case AUDIO_STREAM_ASSISTANT:
-            metadata.base.usage = AUDIO_USAGE_ASSISTANT;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
-            break;
-        case AUDIO_STREAM_REROUTING:
-            metadata.base.usage = AUDIO_USAGE_VIRTUAL_SOURCE;
-            // unknown content type
-            break;
-        case AUDIO_STREAM_CALL_ASSISTANT:
-            metadata.base.usage = AUDIO_USAGE_CALL_ASSISTANT;
-            metadata.base.content_type = AUDIO_CONTENT_TYPE_SPEECH;
-            break;
-        default:
-            break;
-        }
-    }
-
     metadata.channel_mask = mChannelMask;
     strncpy(metadata.tags, mAttr.tags, AUDIO_ATTRIBUTES_TAGS_MAX_SIZE);
     *backInserter++ = metadata;
@@ -1881,6 +1931,12 @@
     signalClientFlag(CBLK_DISABLED);
 }
 
+bool Track::isDisabled() const {
+    audio_track_cblk_t* cblk = mCblk;
+    return (cblk != nullptr)
+            && ((android_atomic_release_load(&cblk->mFlags) & CBLK_DISABLED) != 0);
+}
+
 void Track::signalClientFlag(int32_t flag)
 {
     // FIXME should use proxy, and needs work
@@ -2173,14 +2229,13 @@
             size_t frameCount,
             const AttributionSourceState& attributionSource)
     :   Track(playbackThread, NULL, AUDIO_STREAM_PATCH,
-              audio_attributes_t{} /* currently unused for output track */,
+              AUDIO_ATTRIBUTES_INITIALIZER,
               sampleRate, format, channelMask, frameCount,
               nullptr /* buffer */, (size_t)0 /* bufferSize */, nullptr /* sharedBuffer */,
               AUDIO_SESSION_NONE, getpid(), attributionSource, AUDIO_OUTPUT_FLAG_NONE,
               TYPE_OUTPUT),
     mActive(false), mSourceThread(sourceThread)
 {
-
     if (mCblk != NULL) {
         mOutBuffer.frameCount = 0;
         playbackThread->addOutputTrack_l(this);
@@ -2303,7 +2358,7 @@
                 waitTimeLeftMs = 0;
             }
             if (status == NOT_ENOUGH_DATA) {
-                restartIfDisabled();
+                deferRestartIfDisabled();
                 continue;
             }
         }
@@ -2315,7 +2370,7 @@
         buf.mFrameCount = outFrames;
         buf.mRaw = NULL;
         mClientProxy->releaseBuffer(&buf);
-        restartIfDisabled();
+        deferRestartIfDisabled();
         pInBuffer->frameCount -= outFrames;
         pInBuffer->raw = (int8_t *)pInBuffer->raw + outFrames * mFrameSize;
         mOutBuffer.frameCount -= outFrames;
@@ -2442,10 +2497,12 @@
         size_t bufferSize,
         audio_output_flags_t flags,
         const Timeout& timeout,
-        size_t frameCountToBeReady /** Default behaviour is to start
+        size_t frameCountToBeReady, /** Default behaviour is to start
                                          *  as soon as possible to have
                                          *  the lowest possible latency
-                                         *  even if it might glitch. */)
+                                         *  even if it might glitch. */
+        float speed,
+        float volume)
 {
     return sp<PatchTrack>::make(
             playbackThread,
@@ -2458,7 +2515,9 @@
             bufferSize,
             flags,
             timeout,
-            frameCountToBeReady);
+            frameCountToBeReady,
+            speed,
+            volume);
 }
 
 PatchTrack::PatchTrack(IAfPlaybackThread* playbackThread,
@@ -2471,17 +2530,28 @@
                                                      size_t bufferSize,
                                                      audio_output_flags_t flags,
                                                      const Timeout& timeout,
-                                                     size_t frameCountToBeReady)
+                                                     size_t frameCountToBeReady,
+                                                     float speed,
+                                                     float volume)
     :   Track(playbackThread, NULL, streamType,
-              audio_attributes_t{} /* currently unused for patch track */,
+              AUDIO_ATTRIBUTES_INITIALIZER,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
               AUDIO_SESSION_NONE, getpid(), audioServerAttributionSource(getpid()), flags,
-              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady),
-        PatchTrackBase(mCblk ? new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, true, true)
-                        : nullptr,
+              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady, speed,
+              false /*isSpatialized*/, false /*isBitPerfect*/, volume),
+        PatchTrackBase(mCblk ? new AudioTrackClientProxy(mCblk, mBuffer, frameCount, mFrameSize,
+                        true /*clientInServer*/) : nullptr,
                        playbackThread, timeout)
 {
+    if (mProxy != nullptr) {
+        sp<AudioTrackClientProxy>::cast(mProxy)->setPlaybackRate({
+                /* .mSpeed = */ speed,
+                /* .mPitch = */ AUDIO_TIMESTRETCH_PITCH_NORMAL,
+                /* .mStretchMode = */ AUDIO_TIMESTRETCH_STRETCH_DEFAULT,
+                /* .mFallbackMode = */ AUDIO_TIMESTRETCH_FALLBACK_FAIL
+        });
+    }
     ALOGV("%s(%d): sampleRate %d mPeerTimeout %d.%03d sec",
                                       __func__, mId, sampleRate,
                                       (int)mPeerTimeout.tv_sec,
@@ -2559,7 +2629,7 @@
     const size_t originalFrameCount = buffer->mFrameCount;
     do {
         if (status == NOT_ENOUGH_DATA) {
-            restartIfDisabled();
+            deferRestartIfDisabled();
             buffer->mFrameCount = originalFrameCount; // cleared on error, must be restored.
         }
         status = mProxy->obtainBuffer(buffer, timeOut);
@@ -2570,7 +2640,7 @@
 void PatchTrack::releaseBuffer(Proxy::Buffer* buffer)
 {
     mProxy->releaseBuffer(buffer);
-    restartIfDisabled();
+    deferRestartIfDisabled();
 
     // Check if the PatchTrack has enough data to write once in releaseBuffer().
     // If not, prevent an underrun from occurring by moving the track into FS_FILLING;
@@ -3453,7 +3523,8 @@
           bool isOut,
           const android::content::AttributionSourceState& attributionSource,
           pid_t creatorPid,
-          audio_port_handle_t portId)
+          audio_port_handle_t portId,
+          float volume)
 {
     return sp<MmapTrack>::make(
             thread,
@@ -3465,7 +3536,8 @@
             isOut,
             attributionSource,
             creatorPid,
-            portId);
+            portId,
+            volume);
 }
 
 MmapTrack::MmapTrack(IAfThreadBase* thread,
@@ -3477,7 +3549,8 @@
         bool isOut,
         const AttributionSourceState& attributionSource,
         pid_t creatorPid,
-        audio_port_handle_t portId)
+        audio_port_handle_t portId,
+        float volume)
     :   TrackBase(thread, NULL, attr, sampleRate, format,
                   channelMask, (size_t)0 /* frameCount */,
                   nullptr /* buffer */, (size_t)0 /* bufferSize */,
@@ -3488,10 +3561,15 @@
                   TYPE_DEFAULT, portId,
                   std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_MMAP) + std::to_string(portId)),
         mPid(VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.pid))),
-            mSilenced(false), mSilencedNotified(false)
+            mSilenced(false), mSilencedNotified(false), mVolume(volume)
 {
     // Once this item is logged by the server, the client can add properties.
     mTrackMetrics.logConstructor(creatorPid, uid(), id());
+    if (isOut && (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
+            || attr.usage == AUDIO_USAGE_VIRTUAL_SOURCE)) {
+        // Audio patch and call assistant volume are always max
+        mVolume = 1.0f;
+    }
 }
 
 MmapTrack::~MmapTrack()
@@ -3570,8 +3648,8 @@
 
 void MmapTrack::appendDumpHeader(String8& result) const
 {
-    result.appendFormat("Client Session Port Id  Format Chn mask  SRate Flags %s\n",
-                        isOut() ? "Usg CT": "Source");
+    result.appendFormat("Client Session Port Id  Format Chn mask  SRate Flags %s  %s\n",
+                        isOut() ? "Usg CT": "Source", isOut() ? "PortVol dB" : "");
 }
 
 void MmapTrack::appendDump(String8& result, bool active __unused) const
@@ -3586,6 +3664,7 @@
             mAttr.flags);
     if (isOut()) {
         result.appendFormat("%3x %2x", mAttr.usage, mAttr.content_type);
+        result.appendFormat("%11.2g", 20.0 * log10(mVolume));
     } else {
         result.appendFormat("%6x", mAttr.source);
     }
diff --git a/services/audioflinger/datapath/AudioHwDevice.cpp b/services/audioflinger/datapath/AudioHwDevice.cpp
index 95e9ecc..c2e538c 100644
--- a/services/audioflinger/datapath/AudioHwDevice.cpp
+++ b/services/audioflinger/datapath/AudioHwDevice.cpp
@@ -41,18 +41,20 @@
         AudioStreamOut **ppStreamOut,
         audio_io_handle_t handle,
         audio_devices_t deviceType,
-        audio_output_flags_t flags,
+        audio_output_flags_t *flags,
         struct audio_config *config,
-        const char *address)
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
 
     struct audio_config originalConfig = *config;
-    auto outputStream = new AudioStreamOut(this, flags);
+    auto outputStream = new AudioStreamOut(this);
 
     // Try to open the HAL first using the current format.
     ALOGV("openOutputStream(), try sampleRate %d, format %#x, channelMask %#x", config->sample_rate,
             config->format, config->channel_mask);
-    status_t status = outputStream->open(handle, deviceType, config, address);
+    status_t status = outputStream->open(handle, deviceType, config, flags, address,
+                                        sourceMetadata);
 
     if (status != NO_ERROR) {
         delete outputStream;
@@ -66,18 +68,25 @@
 
         // If the data is encoded then try again using wrapped PCM.
         const bool wrapperNeeded = !audio_has_proportional_frames(originalConfig.format)
-                && ((flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0)
-                && ((flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0);
+                && ((*flags & AUDIO_OUTPUT_FLAG_DIRECT) != 0)
+                && ((*flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) == 0);
 
         if (wrapperNeeded) {
             if (SPDIFEncoder::isFormatSupported(originalConfig.format)) {
-                outputStream = new SpdifStreamOut(this, flags, originalConfig.format);
-                status = outputStream->open(handle, deviceType, &originalConfig, address);
+                outputStream = new SpdifStreamOut(this, originalConfig.format);
+                status = outputStream->open(handle, deviceType, &originalConfig, flags, address,
+                                            sourceMetadata);
                 if (status != NO_ERROR) {
                     ALOGE("ERROR - openOutputStream(), SPDIF open returned %d",
                         status);
                     delete outputStream;
                     outputStream = nullptr;
+                } else {
+                    // on success, we need to assign the actual HAL stream config so that clients
+                    // know and can later patch correctly.
+                    config->format = originalConfig.format;
+                    config->channel_mask = originalConfig.channel_mask;
+                    config->sample_rate = originalConfig.sample_rate;
                 }
             } else {
                 ALOGE("ERROR - openOutputStream(), SPDIFEncoder does not support format 0x%08x",
@@ -151,6 +160,12 @@
                         status);
                     delete inputStream;
                     inputStream = nullptr;
+                } else {
+                    // on success, we need to assign the actual HAL stream config so that clients
+                    // know and can later patch correctly.
+                    config->format = originalConfig.format;
+                    config->channel_mask = originalConfig.channel_mask;
+                    config->sample_rate = originalConfig.sample_rate;
                 }
             } else {
                 ALOGE("ERROR - openInputStream(), SPDIFDecoder does not support format 0x%08x",
diff --git a/services/audioflinger/datapath/AudioHwDevice.h b/services/audioflinger/datapath/AudioHwDevice.h
index 80c1473..6a35b91 100644
--- a/services/audioflinger/datapath/AudioHwDevice.h
+++ b/services/audioflinger/datapath/AudioHwDevice.h
@@ -85,9 +85,10 @@
             AudioStreamOut **ppStreamOut,
             audio_io_handle_t handle,
             audio_devices_t deviceType,
-            audio_output_flags_t flags,
+            audio_output_flags_t *flags,
             struct audio_config *config,
-            const char *address);
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
 
     status_t openInputStream(
             AudioStreamIn **ppStreamIn,
diff --git a/services/audioflinger/datapath/AudioStreamIn.cpp b/services/audioflinger/datapath/AudioStreamIn.cpp
index 76618f4..165ac25 100644
--- a/services/audioflinger/datapath/AudioStreamIn.cpp
+++ b/services/audioflinger/datapath/AudioStreamIn.cpp
@@ -58,7 +58,7 @@
 
     if (mHalFormatHasProportionalFrames &&
             (flags & AUDIO_INPUT_FLAG_DIRECT) == AUDIO_INPUT_FLAG_DIRECT) {
-        // For DirectRecord reset timestamp to 0 on standby.
+        // For DirectRecord reset position to 0 on standby.
         const uint64_t adjustedPosition = (halPosition <= mFramesReadAtStandby) ?
                 0 : (halPosition - mFramesReadAtStandby);
         // Scale from HAL sample rate to application rate.
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index aad538f..7aadda3 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -30,9 +30,8 @@
 namespace android {
 
 // ----------------------------------------------------------------------------
-AudioStreamOut::AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags)
+AudioStreamOut::AudioStreamOut(AudioHwDevice *dev)
         : audioHwDev(dev)
-        , flags(flags)
 {
 }
 
@@ -51,42 +50,17 @@
         return NO_INIT;
     }
 
-    uint32_t halPosition = 0;
+    uint64_t halPosition = 0;
     const status_t status = stream->getRenderPosition(&halPosition);
     if (status != NO_ERROR) {
         return status;
     }
-
-    // Maintain a 64-bit render position using the 32-bit result from the HAL.
-    // This delta calculation relies on the arithmetic overflow behavior
-    // of integers. For example (100 - 0xFFFFFFF0) = 116.
-    const auto truncatedPosition = (uint32_t)mRenderPosition;
-    int32_t deltaHalPosition; // initialization not needed, overwitten by __builtin_sub_overflow()
-    (void) __builtin_sub_overflow(halPosition, truncatedPosition, &deltaHalPosition);
-
-    if (deltaHalPosition >= 0) {
-        mRenderPosition += deltaHalPosition;
-    } else if (mExpectRetrograde) {
-        mExpectRetrograde = false;
-        mRenderPosition -= static_cast<uint64_t>(-deltaHalPosition);
-    }
     // Scale from HAL sample rate to application rate.
-    *frames = mRenderPosition / mRateMultiplier;
+    *frames = halPosition / mRateMultiplier;
 
     return status;
 }
 
-// return bottom 32-bits of the render position
-status_t AudioStreamOut::getRenderPosition(uint32_t *frames)
-{
-    uint64_t position64 = 0;
-    const status_t status = getRenderPosition(&position64);
-    if (status == NO_ERROR) {
-        *frames = (uint32_t)position64;
-    }
-    return status;
-}
-
 status_t AudioStreamOut::getPresentationPosition(uint64_t *frames, struct timespec *timestamp)
 {
     if (stream == nullptr) {
@@ -101,7 +75,7 @@
 
     if (mHalFormatHasProportionalFrames &&
             (flags & AUDIO_OUTPUT_FLAG_DIRECT) == AUDIO_OUTPUT_FLAG_DIRECT) {
-        // For DirectTrack reset timestamp to 0 on standby.
+        // For DirectTrack reset position to 0 on standby.
         const uint64_t adjustedPosition = (halPosition <= mFramesWrittenAtStandby) ?
                 0 : (halPosition - mFramesWrittenAtStandby);
         // Scale from HAL sample rate to application rate.
@@ -118,13 +92,16 @@
         audio_io_handle_t handle,
         audio_devices_t deviceType,
         struct audio_config *config,
-        const char *address)
+        audio_output_flags_t *flagsPtr,
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
     sp<StreamOutHalInterface> outStream;
 
-    const audio_output_flags_t customFlags = (config->format == AUDIO_FORMAT_IEC61937)
-                ? (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO)
-                : flags;
+    audio_output_flags_t customFlags = (config->format == AUDIO_FORMAT_IEC61937)
+                ? (audio_output_flags_t)(*flagsPtr | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO)
+                : *flagsPtr;
+    *flagsPtr = flags = customFlags;
 
     int status = hwDev()->openOutputStream(
             handle,
@@ -132,7 +109,8 @@
             customFlags,
             config,
             address,
-            &outStream);
+            &outStream,
+            sourceMetadata);
     ALOGV("AudioStreamOut::open(), HAL returned stream %p, sampleRate %d, format %#x,"
             " channelMask %#x, status %d", outStream.get(), config->sample_rate, config->format,
             config->channel_mask, status);
@@ -149,7 +127,8 @@
                 customFlags,
                 &customConfig,
                 address,
-                &outStream);
+                &outStream,
+                sourceMetadata);
         ALOGV("AudioStreamOut::open(), treat IEC61937 as PCM, status = %d", status);
     }
 
@@ -179,8 +158,6 @@
 
 int AudioStreamOut::flush()
 {
-    mRenderPosition = 0;
-    mExpectRetrograde = false;
     mFramesWritten = 0;
     mFramesWrittenAtStandby = 0;
     const status_t result = stream->flush();
@@ -189,12 +166,14 @@
 
 int AudioStreamOut::standby()
 {
-    mRenderPosition = 0;
-    mExpectRetrograde = false;
     mFramesWrittenAtStandby = mFramesWritten;
     return stream->standby();
 }
 
+void AudioStreamOut::presentationComplete() {
+    stream->presentationComplete();
+}
+
 ssize_t AudioStreamOut::write(const void *buffer, size_t numBytes)
 {
     size_t bytesWritten;
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index ea41bba..1857099 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -37,23 +37,22 @@
 public:
     AudioHwDevice * const audioHwDev;
     sp<StreamOutHalInterface> stream;
-    const audio_output_flags_t flags;
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
 
     [[nodiscard]] sp<DeviceHalInterface> hwDev() const;
 
-    AudioStreamOut(AudioHwDevice *dev, audio_output_flags_t flags);
+    explicit AudioStreamOut(AudioHwDevice *dev);
 
     virtual status_t open(
             audio_io_handle_t handle,
             audio_devices_t deviceType,
             struct audio_config *config,
-            const char *address);
+            audio_output_flags_t *flagsPtr,
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
 
     virtual ~AudioStreamOut();
 
-    // Get the bottom 32-bits of the 64-bit render position.
-    status_t getRenderPosition(uint32_t *frames);
-
     virtual status_t getRenderPosition(uint64_t *frames);
 
     virtual status_t getPresentationPosition(uint64_t *frames, struct timespec *timestamp);
@@ -91,21 +90,14 @@
     virtual status_t flush();
     virtual status_t standby();
 
-    // Avoid suppressing retrograde motion in mRenderPosition for gapless offload/direct when
-    // transitioning between tracks.
-    // The HAL resets the frame position without flush/stop being called, but calls back prior to
-    // this event. So, on the next occurrence of retrograde motion, we permit backwards movement of
-    // mRenderPosition.
-    virtual void presentationComplete() { mExpectRetrograde = true; }
+    virtual void presentationComplete();
 
 protected:
     uint64_t mFramesWritten = 0; // reset by flush
     uint64_t mFramesWrittenAtStandby = 0;
-    uint64_t mRenderPosition = 0; // reset by flush, standby, or presentation complete
     int mRateMultiplier = 1;
     bool mHalFormatHasProportionalFrames = false;
     size_t mHalFrameSize = 0;
-    bool mExpectRetrograde = false; // see presentationComplete
 };
 
 } // namespace android
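With the revised open() contract the output flags are no longer fixed at construction: the caller hands them in through an in/out pointer and reads back whatever the stream actually requested from the HAL (for example AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO added for IEC61937 payloads). A minimal caller sketch, assuming hypothetical audioHwDev, handle and config values owned by the playback thread:

    // Sketch only, not part of this change.
    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_DIRECT;
    std::vector<playback_track_metadata_v7_t> sourceMetadata;  // may be empty
    auto *out = new AudioStreamOut(audioHwDev);
    const status_t status = out->open(handle, AUDIO_DEVICE_OUT_SPEAKER, &config,
                                      &flags, /*address=*/"", sourceMetadata);
    // On return, `flags` reflects what was actually passed to the HAL.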
diff --git a/services/audioflinger/datapath/SpdifStreamIn.cpp b/services/audioflinger/datapath/SpdifStreamIn.cpp
index 98ce712..0090bc5 100644
--- a/services/audioflinger/datapath/SpdifStreamIn.cpp
+++ b/services/audioflinger/datapath/SpdifStreamIn.cpp
@@ -81,6 +81,11 @@
             outputDevice,
             outputDeviceAddress);
 
+    // reset config back to whatever is returned by HAL
+    config->sample_rate = customConfig.sample_rate;
+    config->format = customConfig.format;
+    config->channel_mask = customConfig.channel_mask;
+
     ALOGI("SpdifStreamIn::open() status = %d", status);
 
 #ifdef TEE_SINK
diff --git a/services/audioflinger/datapath/SpdifStreamOut.cpp b/services/audioflinger/datapath/SpdifStreamOut.cpp
index 65a4eec..a565955 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.cpp
+++ b/services/audioflinger/datapath/SpdifStreamOut.cpp
@@ -33,10 +33,8 @@
  * PCM then we need to wrap the data in an SPDIF wrapper.
  */
 SpdifStreamOut::SpdifStreamOut(AudioHwDevice *dev,
-            audio_output_flags_t flags,
             audio_format_t format)
-        // Tell the HAL that the data will be compressed audio wrapped in a data burst.
-        : AudioStreamOut(dev, (audio_output_flags_t) (flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO))
+        : AudioStreamOut(dev)
         , mSpdifEncoder(this, format)
 {
 }
@@ -45,7 +43,9 @@
         audio_io_handle_t handle,
         audio_devices_t devices,
         struct audio_config *config,
-        const char *address)
+        audio_output_flags_t *flags,
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
     struct audio_config customConfig = *config;
 
@@ -62,6 +62,8 @@
 
     customConfig.format = AUDIO_FORMAT_PCM_16_BIT;
     customConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
+    // Tell the HAL that the data will be compressed audio wrapped in a data burst.
+    *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO);
 
     // Always print this because otherwise it could be very confusing if the
     // HAL and AudioFlinger are using different formats.
@@ -75,7 +77,14 @@
             handle,
             devices,
             &customConfig,
-            address);
+            flags,
+            address,
+            sourceMetadata);
+
+    // reset config back to whatever is returned by HAL
+    config->sample_rate = customConfig.sample_rate;
+    config->format = customConfig.format;
+    config->channel_mask = customConfig.channel_mask;
 
     ALOGI("SpdifStreamOut::open() status = %d", status);
 
diff --git a/services/audioflinger/datapath/SpdifStreamOut.h b/services/audioflinger/datapath/SpdifStreamOut.h
index c6d27ba..3241d6f 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.h
+++ b/services/audioflinger/datapath/SpdifStreamOut.h
@@ -36,14 +36,15 @@
 class SpdifStreamOut : public AudioStreamOut {
 public:
 
-    SpdifStreamOut(AudioHwDevice *dev, audio_output_flags_t flags,
-            audio_format_t format);
+    SpdifStreamOut(AudioHwDevice *dev, audio_format_t format);
 
     status_t open(
             audio_io_handle_t handle,
             audio_devices_t devices,
             struct audio_config *config,
-            const char *address) override;
+            audio_output_flags_t *flags,
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata) override;
 
     /**
     * Write audio buffer to driver. Returns number of bytes written, or a
diff --git a/media/libmedia/include/media/CodecServiceRegistrant.h b/services/audioflinger/datapath/VolumePortInterface.h
similarity index 65%
copy from media/libmedia/include/media/CodecServiceRegistrant.h
copy to services/audioflinger/datapath/VolumePortInterface.h
index e0af781..fb1c463 100644
--- a/media/libmedia/include/media/CodecServiceRegistrant.h
+++ b/services/audioflinger/datapath/VolumePortInterface.h
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2018 The Android Open Source Project
+ * Copyright (C) 2024 The Android Open Source Project
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,10 +14,16 @@
  * limitations under the License.
  */
 
-#ifndef CODEC_SERVICE_REGISTRANT_H_
+#pragma once
 
-#define CODEC_SERVICE_REGISTRANT_H_
+#include <system/audio.h>
 
-typedef void (*RegisterCodecServicesFunc)();
+namespace android {
 
-#endif  // CODEC_SERVICE_REGISTRANT_H_
+class VolumePortInterface : public virtual RefBase {
+public:
+    virtual void setPortVolume(float volume) = 0;
+    virtual float getPortVolume() const = 0;
+};
+
+}  // namespace android
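VolumePortInterface only declares the two pure-virtual volume accessors on top of RefBase; a minimal sketch of an implementer, with a hypothetical class name and default gain:

    // Hypothetical implementer, for illustration only.
    #include <utils/RefBase.h>
    #include "VolumePortInterface.h"

    namespace android {

    class StubVolumePort : public VolumePortInterface {
    public:
        void setPortVolume(float volume) override { mVolume = volume; }
        float getPortVolume() const override { return mVolume; }
    private:
        float mVolume = 1.0f;  // unity gain until a volume is applied
    };

    }  // namespace android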
diff --git a/services/audioflinger/sounddose/SoundDoseManager.cpp b/services/audioflinger/sounddose/SoundDoseManager.cpp
index 3b764d1..cdc36dc 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.cpp
+++ b/services/audioflinger/sounddose/SoundDoseManager.cpp
@@ -753,6 +753,10 @@
     }
 }
 
+void SoundDoseManager::resetReferencesForTest() {
+    mMelReporterCallback.clear();
+}
+
 sp<media::ISoundDose> SoundDoseManager::getSoundDoseInterface(
         const sp<media::ISoundDoseCallback>& callback) {
     ALOGV("%s: Register ISoundDoseCallback", __func__);
diff --git a/services/audioflinger/sounddose/SoundDoseManager.h b/services/audioflinger/sounddose/SoundDoseManager.h
index 52a3fd6..8363d9b 100644
--- a/services/audioflinger/sounddose/SoundDoseManager.h
+++ b/services/audioflinger/sounddose/SoundDoseManager.h
@@ -157,6 +157,8 @@
 
     void onMomentaryExposure(float currentMel, audio_port_handle_t deviceId) const override;
 
+    void resetReferencesForTest();
+
 private:
     class SoundDose : public media::BnSoundDose,
                       public IBinder::DeathRecipient {
@@ -229,7 +231,7 @@
 
     mutable std::mutex mLock;
 
-    const sp<IMelReporterCallback> mMelReporterCallback;
+    sp<IMelReporterCallback> mMelReporterCallback;
 
     // no need for lock since MelAggregator is thread-safe
     const sp<audio_utils::MelAggregator> mMelAggregator;
diff --git a/services/audioparameterparser/Android.bp b/services/audioparameterparser/Android.bp
index f5feece..1c1c1e1 100644
--- a/services/audioparameterparser/Android.bp
+++ b/services/audioparameterparser/Android.bp
@@ -35,10 +35,10 @@
     name: "android.hardware.audio.parameter_parser.example_defaults",
     defaults: [
         "latest_android_hardware_audio_core_ndk_shared",
+        "latest_av_audio_types_aidl_ndk_shared",
     ],
 
     shared_libs: [
-        "av-audio-types-aidl-V1-ndk",
         "libbase",
         "libbinder_ndk",
     ],
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index 8f17ffc..edcb805 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -18,6 +18,7 @@
 #define ANDROID_AUDIOPOLICY_INTERFACE_H
 
 #include <android/media/DeviceConnectedState.h>
+#include <android/media/TrackInternalMuteInfo.h>
 #include <media/AudioCommonTypes.h>
 #include <media/AudioContainers.h>
 #include <media/AudioDeviceTypeAddr.h>
@@ -146,7 +147,8 @@
                                       std::vector<audio_io_handle_t> *secondaryOutputs,
                                       output_type_t *outputType,
                                       bool *isSpatialized,
-                                      bool *isBitPerfect) = 0;
+                                      bool *isBitPerfect,
+                                      float *volume) = 0;
     // indicates to the audio policy manager that the output starts being used by corresponding
     // stream.
     virtual status_t startOutput(audio_port_handle_t portId) = 0;
@@ -179,10 +181,16 @@
     // volume control functions
     //
 
+    // notifies the audio policy manager that the absolute volume mode is enabled/disabled on
+    // the passed device. Also specifies the stream that is controlling the absolute volume.
+    virtual status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t device,
+                                                    const char *address,
+                                                    bool enabled,
+                                                    audio_stream_type_t streamToDriveAbs) = 0;
     // initialises stream volume conversion parameters by specifying volume index range.
     virtual void initStreamVolume(audio_stream_type_t stream,
-                                      int indexMin,
-                                      int indexMax) = 0;
+                                  int indexMin,
+                                  int indexMax) = 0;
 
     // sets the new stream volume at a level corresponding to the supplied index for the
     // supplied device. By convention, specifying AUDIO_DEVICE_OUT_DEFAULT_FOR_VOLUME means
@@ -287,8 +295,7 @@
     virtual status_t startAudioSource(const struct audio_port_config *source,
                                       const audio_attributes_t *attributes,
                                       audio_port_handle_t *portId,
-                                      uid_t uid,
-                                      bool internal = false) = 0;
+                                      uid_t uid) = 0;
     virtual status_t stopAudioSource(audio_port_handle_t portId) = 0;
 
     virtual status_t setMasterMono(bool mono) = 0;
@@ -471,7 +478,8 @@
                                 audio_config_base_t *mixerConfig,
                                 const sp<DeviceDescriptorBase>& device,
                                 uint32_t *latencyMs,
-                                audio_output_flags_t flags) = 0;
+                                audio_output_flags_t *flags,
+                                audio_attributes_t audioAttributes) = 0;
     // creates a special output that is duplicated to the two outputs passed as arguments.
     // The duplication is performed by a special mixer thread in the AudioFlinger.
     virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
@@ -508,6 +516,18 @@
     // for each output (destination device) it is attached to.
     virtual status_t setStreamVolume(audio_stream_type_t stream, float volume,
                                      audio_io_handle_t output, int delayMs = 0) = 0;
+    /**
+     * Set volume for given AudioTrack port ids for a particular output.
+     * For the same user setting, a given volume group and associated output port id
+     * can have different volumes for each output (destination device) it is attached to.
+     * @param ports to consider
+     * @param volume to apply
+     * @param output to consider
+     * @param delayMs to use
+     * @return NO_ERROR if successful
+     */
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t>& ports, float volume,
+            audio_io_handle_t output, int delayMs = 0) = 0;
 
     // function enabling to send proprietary informations directly from audio policy manager to
     // audio hardware interface.
@@ -585,6 +605,9 @@
     // Get the attributes of the mix port when connecting to the given device port.
     virtual status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                      struct audio_port_v7 *mixPort) = 0;
+
+    virtual status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) = 0;
 };
 
     // These are the signatures of createAudioPolicyManager/destroyAudioPolicyManager
diff --git a/services/audiopolicy/common/include/SpatializerHelper.h b/services/audiopolicy/common/include/SpatializerHelper.h
new file mode 100644
index 0000000..2eb6613
--- /dev/null
+++ b/services/audiopolicy/common/include/SpatializerHelper.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <com_android_media_audio.h>
+#include <cutils/properties.h>
+
+namespace android {
+
+class SpatializerHelper {
+  public:
+    /**
+     * @brief Check if the stereo spatialization feature is turned on by:
+     *        - sysprop "ro.audio.stereo_spatialization_enabled" is true
+     *        - com_android_media_audio_stereo_spatialization flag is on
+     *
+     * @return true if the stereo spatialization feature is enabled
+     * @return false if the stereo spatialization feature is not enabled
+     */
+    static bool isStereoSpatializationFeatureEnabled() {
+        static const bool stereoSpatializationEnabled =
+                property_get_bool("ro.audio.stereo_spatialization_enabled", false) &&
+                com_android_media_audio_stereo_spatialization();
+        return stereoSpatializationEnabled;
+    }
+};
+
+} // namespace android
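The helper evaluates the sysprop and the aconfig flag once, caching the result in a function-local static, so repeated calls stay cheap. A hypothetical call site:

    // Illustration only, not part of this change.
    #include "SpatializerHelper.h"

    bool canSpatializeStereoContent() {
        // True only when both the sysprop and the aconfig flag are enabled.
        return android::SpatializerHelper::isStereoSpatializationFeatureEnabled();
    }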
diff --git a/services/audiopolicy/common/include/policy.h b/services/audiopolicy/common/include/policy.h
index 4643bd1..3b7cae3 100644
--- a/services/audiopolicy/common/include/policy.h
+++ b/services/audiopolicy/common/include/policy.h
@@ -17,6 +17,7 @@
 #pragma once
 
 #include <system/audio.h>
+#include <set>
 #include <vector>
 
 #include <media/AudioContainers.h>
@@ -25,18 +26,37 @@
 
 using StreamTypeVector = std::vector<audio_stream_type_t>;
 
+/**
+ * Legacy audio policy product strategies IDs. These strategies are supported by the default
+ * policy engine.
+ */
+enum legacy_strategy {
+    STRATEGY_NONE = -1,
+    STRATEGY_MEDIA,
+    STRATEGY_PHONE,
+    STRATEGY_SONIFICATION,
+    STRATEGY_SONIFICATION_RESPECTFUL,
+    STRATEGY_DTMF,
+    STRATEGY_ENFORCED_AUDIBLE,
+    STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
+    STRATEGY_ACCESSIBILITY,
+    STRATEGY_REROUTING,
+    STRATEGY_CALL_ASSISTANT,
+    STRATEGY_PATCH,
+};
+
 static const audio_attributes_t defaultAttr = AUDIO_ATTRIBUTES_INITIALIZER;
 
+static const std::set<audio_usage_t > gHighPriorityUseCases = {
+        AUDIO_USAGE_ALARM, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE
+};
+
 } // namespace android
 
 static const audio_format_t gDynamicFormat = AUDIO_FORMAT_DEFAULT;
 
 static const uint32_t SONIFICATION_RESPECTFUL_AFTER_MUSIC_DELAY = 5000;
 
-// For mixed output and inputs, the policy will use max mixer sampling rates.
-// Do not limit sampling rate otherwise
-#define SAMPLE_RATE_HZ_MAX 192000
-
 // Used when a client opens a capture stream, without specifying a desired sample rate.
 #define SAMPLE_RATE_HZ_DEFAULT 48000
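With the legacy strategy IDs and gHighPriorityUseCases exposed from policy.h, classifying a client's usage reduces to a set lookup; a hedged one-liner, where attr is a hypothetical audio_attributes_t:

    // Illustration only; `attr` is a hypothetical audio_attributes_t.
    const bool isHighPriorityUseCase =
            android::gHighPriorityUseCases.count(attr.usage) != 0;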
 
diff --git a/services/audiopolicy/common/managerdefinitions/Android.bp b/services/audiopolicy/common/managerdefinitions/Android.bp
index e8b04ce..4dedcd6 100644
--- a/services/audiopolicy/common/managerdefinitions/Android.bp
+++ b/services/audiopolicy/common/managerdefinitions/Android.bp
@@ -39,9 +39,12 @@
         "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
+        "com.android.media.audioserver-aconfig-cc",
+        "libaconfig_storage_read_api_cc",
         "libaudioclient_aidl_conversion",
         "libaudiofoundation",
         "libaudiopolicy",
+        "libaudioutils",
         "libbase",
         "libcutils",
         "libhidlbase",
@@ -50,15 +53,13 @@
         "libmedia_helper",
         "libutils",
         "libxml2",
+        "server_configurable_flags",
     ],
     export_shared_lib_headers: [
         "libaudiofoundation",
         "libmedia",
         "libmedia_helper",
     ],
-    static_libs: [
-        "libaudioutils",
-    ],
     header_libs: [
         "libaudiopolicycommon",
         "libaudiopolicymanager_interface_headers",
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
index 6167f95..e519766 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
@@ -47,13 +47,17 @@
 
     if (active) {
         // On MMAP IOs, the preferred device is selected by the first client (virtual client
-        // created when the mmap stream is opened). This client is never active.
+        // created when the mmap stream is opened). This client is never active and we only
+        // consider the Filter criteria, not the active state.
         // On non MMAP IOs, the preferred device is honored only if all active clients have
         // a preferred device in which case the first client drives the selection.
         if (desc->isMmap()) {
-            // The client list is never empty on a MMAP IO
-            return devices.getDeviceFromId(
-                    desc->clientsList(false /*activeOnly*/)[0]->preferredDeviceId());
+            auto matchingClients = desc->clientsList(
+                    false /*activeOnly*/, filter, false /*preferredDevice*/);
+            if (matchingClients.empty()) {
+                return nullptr;
+            }
+            return devices.getDeviceFromId(matchingClients[0]->preferredDeviceId());
         } else {
             auto activeClientsWithRoute =
                 desc->clientsList(true /*activeOnly*/, filter, true /*preferredDevice*/);
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index c26ea10..0f2fe24 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -41,7 +41,8 @@
 {
 public:
     AudioInputDescriptor(const sp<IOProfile>& profile,
-                         AudioPolicyClientInterface *clientInterface);
+                         AudioPolicyClientInterface *clientInterface,
+                         bool isPreemptor);
 
     virtual ~AudioInputDescriptor() = default;
 
@@ -127,6 +128,8 @@
     // active use case
     void checkSuspendEffects();
 
+    bool isPreemptor() const { return mIsPreemptor; }
+
  private:
 
     void updateClientRecordingConfiguration(int event, const sp<RecordClientDescriptor>& client);
@@ -145,6 +148,7 @@
     int32_t mGlobalActiveCount = 0;  // non-client-specific activity ref count
     EffectDescriptorCollection mEnabledEffects;
     audio_input_flags_t& mFlags = AudioPortConfig::mFlags.input;
+    bool mIsPreemptor; // true if this input was opened after preempting another one
 };
 
 class AudioInputCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 00958aa..835fad2 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -29,6 +29,7 @@
 #include "ClientDescriptor.h"
 #include "DeviceDescriptor.h"
 #include "PolicyAudioPort.h"
+#include "PreferredMixerAttributesInfo.h"
 #include <vector>
 
 namespace android {
@@ -411,8 +412,9 @@
                       const audio_config_base_t *mixerConfig,
                       const DeviceVector &devices,
                       audio_stream_type_t stream,
-                      audio_output_flags_t flags,
-                      audio_io_handle_t *output);
+                      audio_output_flags_t *flags,
+                      audio_io_handle_t *output,
+                      audio_attributes_t attributes);
 
         // Called when a stream is about to be started
         // Note: called before setClientActive(true);
@@ -477,8 +479,25 @@
 
     PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
 
+    bool isBitPerfect() const {
+        return (getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    /**
+     * Return true if there is any client with the same usage active on the given device.
+     * When the given device is null, return true if there is any client active.
+     */
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     virtual std::string info() const override;
 
+    /**
+     * Finds all ports matching the given volume source.
+     * @param vs to be considered
+     * @return vector of ports following the given volume source.
+     */
+    std::vector<audio_port_handle_t> getPortsForVolumeSource(const VolumeSource& vs);
+
     const sp<IOProfile> mProfile;          // I/O profile this output derives from
     audio_io_handle_t mIoHandle;           // output handle
     uint32_t mLatency;                  //
@@ -489,7 +508,7 @@
     audio_session_t mDirectClientSession; // session id of the direct output client
     bool mPendingReopenToQueryProfiles = false;
     audio_channel_mask_t mMixerChannelMask = AUDIO_CHANNEL_NONE;
-    bool mUsePreferredMixerAttributes = false;
+    sp<PreferredMixerAttributesInfo> mPreferredAttrInfo = nullptr;
 };
 
 // Audio output driven by an input device directly.
@@ -616,6 +635,8 @@
      */
     bool isAnyDeviceTypeActive(const DeviceTypeSet& deviceTypes) const;
 
+    bool isUsageActiveOnDevice(audio_usage_t usage, sp<DeviceDescriptor> device) const;
+
     void dump(String8 *dst) const;
 };
 
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
index 1f6002f..b193cb8 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioPolicyConfig.h
@@ -16,10 +16,10 @@
 
 #pragma once
 
+#include <optional>
 #include <string>
 #include <unordered_map>
 #include <unordered_set>
-#include <vector>
 
 #include <DeviceDescriptor.h>
 #include <HwModule.h>
@@ -49,6 +49,7 @@
     static const constexpr char* const kDefaultConfigSource = "AudioPolicyConfig::setDefault";
     // The suffix of the "engine default" implementation shared library name.
     static const constexpr char* const kDefaultEngineLibraryNameSuffix = "default";
+    static const constexpr char* const kCapEngineLibraryNameSuffix = "configurable";
 
     // Creates the default (fallback) configuration.
     static sp<const AudioPolicyConfig> createDefault();
@@ -140,6 +141,12 @@
 
     void setDefault();
 
+    void setUseDeepBufferForMediaOverrideForTests(bool useDeepBufferForMedia)
+    {
+        mUseDeepBufferForMediaOverride = useDeepBufferForMedia;
+    }
+    bool useDeepBufferForMedia() const;
+
 private:
     friend class sp<AudioPolicyConfig>;
 
@@ -157,6 +164,7 @@
     sp<DeviceDescriptor> mDefaultOutputDevice;
     bool mIsCallScreenModeSupported = false;
     SurroundFormats mSurroundFormats;
+    std::optional<bool> mUseDeepBufferForMediaOverride;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h b/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
index f84bda7..5fb0ad4 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioProfileVectorHelper.h
@@ -38,11 +38,40 @@
 void appendAudioProfiles(AudioProfileVector &audioProfileVector,
                          const AudioProfileVector &audioProfileVectorToAppend);
 
+/**
+ * Check if the profile vector contains a profile that matches the given sampling rate, channel
+ * mask and format. Note that this method uses `audio_formats_match` from policy.h, which will
+ * consider PCM formats to match if their bytes per sample are greater than 2.
+ *
+ * @param audioProfileVector
+ * @param samplingRate
+ * @param channelMask
+ * @param format
+ * @return NO_ERROR if the given profile vector is empty or it contains a profile that matches the
+ *         given sampling rate, channel mask and format. Otherwise, returns BAD_VALUE.
+ */
 status_t checkExactProfile(const AudioProfileVector &audioProfileVector,
                            const uint32_t samplingRate,
                            audio_channel_mask_t channelMask,
                            audio_format_t format);
 
+/**
+ * Check if the profile vector contains a profile that has exactly the same sampling rate, channel
+ * mask and format as the given values.
+ *
+ * @param audioProfileVector
+ * @param samplingRate
+ * @param channelMask
+ * @param format
+ * @return NO_ERROR if the given profile vector is empty or it contains a profile that has
+ *         exactly the same sampling rate, channel mask and format as the given values. Otherwise,
+ *         returns BAD_VALUE.
+ */
+status_t checkIdenticalProfile(const AudioProfileVector &audioProfileVector,
+                               const uint32_t samplingRate,
+                               audio_channel_mask_t channelMask,
+                               audio_format_t format);
+
 status_t checkCompatibleProfile(const AudioProfileVector &audioProfileVector,
                                 uint32_t &samplingRate,
                                 audio_channel_mask_t &channelMask,
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index fe90a1e..60da405 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -63,6 +63,8 @@
      * HW Audio Source.
      */
     virtual bool isInternal() const { return false; }
+    virtual bool isCallRx() const { return false; }
+    virtual bool isCallTx() const { return false; }
     audio_port_handle_t portId() const { return mPortId; }
     uid_t uid() const { return mUid; }
     audio_session_t session() const { return mSessionId; };
@@ -165,6 +167,18 @@
         mIsInvalid = true;
     }
 
+    bool getInternalMute() const { return mInternalMute; }
+
+    /**
+     * Set the internal mute for a client. Return true if the existing value is different from
+     * the given value.
+     */
+    bool setInternalMute(bool muted) {
+        const bool result = (mInternalMute != muted);
+        mInternalMute = muted;
+        return result;
+    }
+
 private:
     const audio_stream_type_t mStream;
     const product_strategy_t mStrategy;
@@ -178,6 +192,7 @@
      */
     uint32_t mActivityCount = 0;
     bool mIsInvalid = false;
+    bool mInternalMute = false;
 };
 
 class RecordClientDescriptor: public ClientDescriptor
@@ -223,7 +238,7 @@
                            const sp<DeviceDescriptor>& srcDevice,
                            audio_stream_type_t stream, product_strategy_t strategy,
                            VolumeSource volumeSource,
-                           bool isInternal);
+                           bool isInternal, bool isCallRx, bool isCallTx);
 
     ~SourceClientDescriptor() override = default;
 
@@ -250,6 +265,8 @@
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
     bool isInternal() const override { return mIsInternal; }
+    bool isCallRx() const override { return mIsCallRx; }
+    bool isCallTx() const override { return mIsCallTx; }
 
     using ClientDescriptor::dump;
     void dump(String8 *dst, int spaces) const override;
@@ -281,6 +298,8 @@
      * requester to prevent rerouting SwOutput involved in raw patches.
      */
     bool mIsInternal = false;
+    bool mIsCallRx = false;
+    bool mIsCallTx = false;
 };
 
 class SourceClientCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index c502fc2..7002e63 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -282,6 +282,11 @@
 
     const AudioProfileVector& getSupportedProfiles() { return mSupportedProfiles; }
 
+    /**
+     * @brief Checks whether all devices in the device vector are attached to their HwModule.
+     * @return true if all devices in the vector are attached, false otherwise
+     */
+    bool areAllDevicesAttached() const;
     // Return a string to describe the DeviceVector. The sensitive information will only be
     // added to the string if `includeSensitiveInfo` is true.
     std::string toString(bool includeSensitiveInfo = false) const;
diff --git a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
index b92cd70..f7b9b33 100644
--- a/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/EffectDescriptor.h
@@ -99,21 +99,20 @@
      * @return ioHandle if found, AUDIO_IO_HANDLE_NONE otherwise.
      */
     audio_io_handle_t getIoForSession(audio_session_t sessionId,
-                                      const effect_uuid_t *effectType = nullptr) const;
-    bool hasOrphansForSession(audio_session_t sessionId) const;
-    EffectDescriptorCollection getOrphanEffectsForSession(audio_session_t sessionId) const;
-    void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
+                                      const effect_uuid_t* effectType = nullptr) const;
 
     /**
-     * @brief Checks if there is at least one orphan effect with given sessionId and effect type
-     * uuid.
+     * @brief Checks if there is at least one orphan effect with given sessionId and optional effect
+     * type uuid.
      * @param sessionId Session ID.
-     * @param effectType Effect type UUID, the implementation will be same as hasOrphansForSession
-     * if null.
+     * @param effectType Optional effect type UUID pointer to effect_uuid_t, nullptr by default.
      * @return True if there is an orphan effect for given sessionId and type UUID, false otherwise.
      */
-    bool hasOrphanEffectsForSessionAndType(audio_session_t sessionId,
-                                           const effect_uuid_t* effectType) const;
+    bool hasOrphansForSession(audio_session_t sessionId,
+                              const effect_uuid_t* effectType = nullptr) const;
+
+    EffectDescriptorCollection getOrphanEffectsForSession(audio_session_t sessionId) const;
+    void dump(String8 *dst, int spaces = 0, bool verbose = true) const;
 
 private:
     status_t setEffectEnabled(const sp<EffectDescriptor> &effectDesc, bool enabled);
diff --git a/services/audiopolicy/common/managerdefinitions/include/HwModule.h b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
index d206637..26bb94f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/HwModule.h
+++ b/services/audiopolicy/common/managerdefinitions/include/HwModule.h
@@ -137,6 +137,7 @@
 class HwModuleCollection : public Vector<sp<HwModule> >
 {
 public:
+    sp<HwModule> getModuleFromHandle(audio_module_handle_t handle) const;
     sp<HwModule> getModuleFromName(const char *name) const;
 
     /**
diff --git a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
index acf787b..6b21e9f 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PolicyAudioPort.h
@@ -56,9 +56,14 @@
     // Audio port IDs are in a different namespace than AudioFlinger unique IDs
     static audio_port_handle_t getNextUniqueId();
 
-    // searches for an exact match
+    // searches for an exact match; note that this method uses `audio_formats_match` from policy.h,
+    // which will consider PCM formats to match if their bytes per sample are greater than 2.
     virtual status_t checkExactAudioProfile(const struct audio_port_config *config) const;
 
+    // searches for an identical match, unlike `checkExactAudioProfile` above, this will also
+    // require the formats to be exactly the same.
+    virtual status_t checkIdenticalAudioProfile(const struct audio_port_config *config) const;
+
     // searches for a compatible match, currently implemented for input
     // parameters are input|output, returned value is the best match.
     status_t checkCompatibleAudioProfile(uint32_t &samplingRate,
@@ -100,6 +105,12 @@
                          const ChannelMaskSet &channelMasks) const;
     void pickSamplingRate(uint32_t &rate, const SampleRateSet &samplingRates) const;
 
+    status_t checkAudioProfile(const struct audio_port_config *config,
+                               std::function<status_t(const AudioProfileVector&,
+                                                      const uint32_t samplingRate,
+                                                      audio_channel_mask_t,
+                                                      audio_format_t)> checkProfile) const;
+
     sp<HwModule> mModule;     // audio HW module exposing this I/O stream
     AudioRouteVector mRoutes; // Routes involving this port
 };
diff --git a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
index 9472481..a493e3c 100644
--- a/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
+++ b/services/audiopolicy/common/managerdefinitions/include/PreferredMixerAttributesInfo.h
@@ -44,6 +44,17 @@
 
     void increaseActiveClient() { mActiveClientsCount++; }
     void decreaseActiveClient() { mActiveClientsCount--; }
+    void resetActiveClient() { mActiveClientsCount = 0; }
+
+    bool isBitPerfect() const {
+        return (getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE;
+    }
+
+    bool configMatches(const audio_config_t& config) const {
+        return config.format == mMixerAttributes.config.format &&
+                config.channel_mask == mMixerAttributes.config.channel_mask &&
+                config.sample_rate == mMixerAttributes.config.sample_rate;
+    }
 
     void dump(String8 *dst);
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 44f84b9..5a0fd97 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -30,9 +30,10 @@
 namespace android {
 
 AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile,
-                                           AudioPolicyClientInterface *clientInterface)
+                                           AudioPolicyClientInterface *clientInterface,
+                                           bool isPreemptor)
     : mProfile(profile)
-    ,  mClientInterface(clientInterface)
+    ,  mClientInterface(clientInterface), mIsPreemptor(isPreemptor)
 {
     if (profile != NULL) {
         profile->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
@@ -275,6 +276,9 @@
                             "%s invalid profile active count %u",
                             __func__, mProfile->curActiveCount);
         mProfile->curActiveCount--;
+        // allow preemption again now that at least one client was able to
+        // capture on this input
+        mIsPreemptor = false;
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 6537a00..2c41de4 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -27,6 +27,7 @@
 #include "HwModule.h"
 #include "TypeConverter.h"
 #include "policy.h"
+#include <com_android_media_audioserver.h>
 #include <media/AudioGain.h>
 #include <media/AudioParameter.h>
 #include <media/AudioPolicy.h>
@@ -34,6 +35,8 @@
 // A device mask for all audio output devices that are considered "remote" when evaluating
 // active output devices in isStreamActiveRemotely()
 
+namespace audioserver_flags = com::android::media::audioserver;
+
 namespace android {
 
 static const DeviceTypeSet& getAllOutRemoteDevices() {
@@ -498,17 +501,33 @@
         const DeviceTypeSet& deviceTypes, uint32_t delayMs) {
     // volume source active and more than one volume source is active, otherwise, no-op or let
     // setVolume controlling SW and/or HW Gains
-    if (!streamTypes.empty() && isActive(vs) && (getActiveVolumeSources().size() > 1)) {
-        for (const auto& devicePort : devices()) {
-            if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
+    if (!audioserver_flags::portid_volume_management()) {
+        if (!streamTypes.empty() && isActive(vs) && (getActiveVolumeSources().size() > 1)) {
+            for (const auto& devicePort : devices()) {
+                if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
                     devicePort->hasGainController(true /*canUseForVolume*/)) {
-                float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
-                ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
-                      mIoHandle, vs, muted, getActiveVolumeSources().size());
-                for (const auto &stream : streamTypes) {
-                    mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                    float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
+                    ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
+                          mIoHandle, vs, muted, getActiveVolumeSources().size());
+                    for (const auto &stream : streamTypes) {
+                        mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                    }
+                    return;
                 }
-                return;
+            }
+        }
+    } else {
+        if (isActive(vs) && (getActiveVolumeSources().size() > 1)) {
+            for (const auto &devicePort: devices()) {
+                if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
+                    devicePort->hasGainController(true /*canUseForVolume*/)) {
+                    float volumeAmpl = muted ? 0.0f : Volume::DbToAmpl(0);
+                    ALOGV("%s: output: %d, vs: %d, muted: %d, active vs count: %zu", __func__,
+                          mIoHandle, vs, muted, getActiveVolumeSources().size());
+                    mClientInterface->setPortsVolume(
+                            getPortsForVolumeSource(vs), volumeAmpl, mIoHandle, delayMs);
+                    return;
+                }
             }
         }
     }
@@ -524,6 +543,20 @@
     StreamTypeVector streams = streamTypes;
     if (!AudioOutputDescriptor::setVolume(
             volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
+        if (hasStream(streamTypes, AUDIO_STREAM_BLUETOOTH_SCO)) {
+            VolumeSource callVolSrc = getVoiceSource();
+            if (callVolSrc != VOLUME_SOURCE_NONE && volumeDb != getCurVolume(callVolSrc)) {
+                setCurVolume(callVolSrc, volumeDb, true);
+                float volumeAmpl = Volume::DbToAmpl(volumeDb);
+                if (audioserver_flags::portid_volume_management()) {
+                    mClientInterface->setPortsVolume(getPortsForVolumeSource(callVolSrc),
+                            volumeAmpl, mIoHandle, delayMs);
+                } else {
+                    mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL,
+                            volumeAmpl, mIoHandle, delayMs);
+                }
+            }
+        }
         return false;
     }
     if (streams.empty()) {
@@ -531,25 +564,34 @@
     }
     for (const auto& devicePort : devices()) {
         // APM loops on all group, so filter on active group to set the port gain,
-        // let the other groups set the stream volume as per legacy
+        // let the other groups set the sw volume as per legacy
         // TODO: Pass in the device address and check against it.
         if (isSingleDeviceType(deviceTypes, devicePort->type()) &&
                 devicePort->hasGainController(true) && isActive(vs)) {
             ALOGV("%s: device %s has gain controller", __func__, devicePort->toString().c_str());
             // @todo: here we might be in trouble if the SwOutput has several active clients with
             // different Volume Source (or if we allow several curves within same volume group)
-            //
-            // @todo: default stream volume to max (0) when using HW Port gain?
-            // Allows to set SW Gain on AudioFlinger if:
-            //    -volume group has explicit stream(s) associated
-            //    -volume group with no explicit stream(s) is the only active source on this output
-            // Allows to mute SW Gain on AudioFlinger only for volume group with explicit stream(s)
-            if (!streamTypes.empty() || (getActiveVolumeSources().size() == 1)) {
-                const bool canMute = muted && (volumeDb != 0.0f) && !streamTypes.empty();
-                float volumeAmpl = canMute ? 0.0f : Volume::DbToAmpl(0);
-                for (const auto &stream : streams) {
-                    mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+            if (!audioserver_flags::portid_volume_management()) {
+                // @todo: default stream volume to max (0) when using HW Port gain?
+                // Allows to set SW Gain on AudioFlinger if:
+                //    -volume group has explicit stream(s) associated
+                //    -volume group with no explicit stream(s) is the only active source on this
+                //    output
+                // Allows to mute SW Gain on AudioFlinger only for volume group with explicit
+                // stream(s)
+                if (!streamTypes.empty() || (getActiveVolumeSources().size() == 1)) {
+                    const bool canMute = muted && (volumeDb != 0.0f) && !streamTypes.empty();
+                    float volumeAmpl = canMute ? 0.0f : Volume::DbToAmpl(0);
+                    for (const auto &stream: streams) {
+                        mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+                    }
                 }
+            } else {
+                float volumeAmpl = (muted && volumeDb != 0.0f) ? 0.0f : Volume::DbToAmpl(0);
+                ALOGV("%s: output: %d, vs: %d, active vs count: %zu", __func__,
+                      mIoHandle, vs, getActiveVolumeSources().size());
+                mClientInterface->setPortsVolume(
+                        getPortsForVolumeSource(vs), volumeAmpl, mIoHandle, delayMs);
             }
             AudioGains gains = devicePort->getGains();
             int gainMinValueInMb = gains[0]->getMinValueInMb();
@@ -561,6 +603,7 @@
             audio_port_config config = {};
             devicePort->toAudioPortConfig(&config);
             config.config_mask = AUDIO_PORT_CONFIG_GAIN;
+            config.gain.mode = gains[0]->getMode();
             config.gain.values[0] = gainValueMb;
             return mClientInterface->setAudioPortConfig(&config, 0) == NO_ERROR;
         }
@@ -568,26 +611,54 @@
     // Force VOICE_CALL to track BLUETOOTH_SCO stream volume when bluetooth audio is enabled
     float volumeAmpl = Volume::DbToAmpl(getCurVolume(vs));
     if (hasStream(streams, AUDIO_STREAM_BLUETOOTH_SCO)) {
-        mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle, delayMs);
         VolumeSource callVolSrc = getVoiceSource();
+        if (audioserver_flags::portid_volume_management()) {
+            if (callVolSrc != VOLUME_SOURCE_NONE) {
+                mClientInterface->setPortsVolume(getPortsForVolumeSource(callVolSrc), volumeAmpl,
+                        mIoHandle, delayMs);
+            }
+        } else {
+            mClientInterface->setStreamVolume(AUDIO_STREAM_VOICE_CALL, volumeAmpl, mIoHandle,
+                    delayMs);
+        }
         if (callVolSrc != VOLUME_SOURCE_NONE) {
             setCurVolume(callVolSrc, getCurVolume(vs), true);
         }
     }
-    for (const auto &stream : streams) {
-        ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
-              mIoHandle, vs, volumeDb, delayMs, toString(stream).c_str());
-        mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+    if (audioserver_flags::portid_volume_management()) {
+        ALOGV("%s output %d for volumeSource %d, volume %f, delay %d active=%d", __func__,
+              mIoHandle, vs, volumeDb, delayMs, isActive(vs));
+        mClientInterface->setPortsVolume(getPortsForVolumeSource(vs), volumeAmpl, mIoHandle,
+                                         delayMs);
+    } else {
+        for (const auto &stream : streams) {
+            ALOGV("%s output %d for volumeSource %d, volume %f, delay %d stream=%s", __func__,
+                  mIoHandle, vs, volumeDb, delayMs, toString(stream).c_str());
+            mClientInterface->setStreamVolume(stream, volumeAmpl, mIoHandle, delayMs);
+        }
     }
     return true;
 }
 
+std::vector<audio_port_handle_t> SwAudioOutputDescriptor::getPortsForVolumeSource(
+        const VolumeSource& vs)
+{
+    std::vector<audio_port_handle_t> portsForVolumeSource;
+    for (const auto& client : getClientIterable()) {
+        if (client->volumeSource() == vs) {
+            portsForVolumeSource.push_back(client->portId());
+        }
+    }
+    return portsForVolumeSource;
+}
+
 status_t SwAudioOutputDescriptor::open(const audio_config_t *halConfig,
                                        const audio_config_base_t *mixerConfig,
                                        const DeviceVector &devices,
                                        audio_stream_type_t stream,
-                                       audio_output_flags_t flags,
-                                       audio_io_handle_t *output)
+                                       audio_output_flags_t *flags,
+                                       audio_io_handle_t *output,
+                                       audio_attributes_t attributes)
 {
     mDevices = devices;
     sp<DeviceDescriptor> device = devices.getDeviceForOpening();
@@ -615,7 +686,7 @@
     // create a default one
     if ((mProfile->getFlags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) &&
             lHalConfig.offload_info.format == AUDIO_FORMAT_DEFAULT) {
-        flags = (audio_output_flags_t)(flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+        *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
         lHalConfig.offload_info = AUDIO_INFO_INITIALIZER;
         lHalConfig.offload_info.sample_rate = lHalConfig.sample_rate;
         lHalConfig.offload_info.channel_mask = lHalConfig.channel_mask;
@@ -633,7 +704,7 @@
         lMixerConfig = *mixerConfig;
     }
 
-    mFlags = (audio_output_flags_t)(mFlags | flags);
+    mFlags = (audio_output_flags_t)(mFlags | *flags);
 
     // If no mixer config is specified for a spatializer output, default to 5.1 for proper
     // configuration of the final downmixer or spatializer
@@ -651,7 +722,9 @@
                                                    &lMixerConfig,
                                                    device,
                                                    &mLatency,
-                                                   mFlags);
+                                                   &mFlags,
+                                                   attributes);
+    *flags = mFlags;
 
     if (status == NO_ERROR) {
         LOG_ALWAYS_FATAL_IF(*output == AUDIO_IO_HANDLE_NONE,
@@ -791,6 +864,16 @@
     mDevices = devices;
 }
 
+bool SwAudioOutputDescriptor::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    if (device != nullptr && !mDevices.contains(device)) {
+        return false;
+    }
+    return std::any_of(mActiveClients.begin(), mActiveClients.end(),
+                       [usage](sp<TrackClientDescriptor> client) {
+                           return client->attributes().usage == usage; });
+}
+
 // HwAudioOutputDescriptor implementation
 HwAudioOutputDescriptor::HwAudioOutputDescriptor(const sp<SourceClientDescriptor>& source,
                                                  AudioPolicyClientInterface *clientInterface)
@@ -1016,6 +1099,17 @@
     return clientsForStream;
 }
 
+bool SwAudioOutputCollection::isUsageActiveOnDevice(audio_usage_t usage,
+                                                    sp<android::DeviceDescriptor> device) const {
+    for (size_t i = 0; i < this->size(); i++) {
+        const sp<SwAudioOutputDescriptor> outputDesc = this->valueAt(i);
+        if (outputDesc->isUsageActiveOnDevice(usage, device)) {
+            return true;
+        }
+    }
+    return false;
+}
+
 std::string SwAudioOutputDescriptor::info() const {
     std::string result;
     result.append("[" );
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
index 4edd11f..f5e135e 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioPolicyConfig.cpp
@@ -16,6 +16,7 @@
 
 #define LOG_TAG "APM_Config"
 
+#include <android-base/properties.h>
 #include <AudioPolicyConfig.h>
 #include <IOProfile.h>
 #include <Serializer.h>
@@ -269,6 +270,9 @@
     mSurroundFormats = VALUE_OR_RETURN_STATUS(
             aidl2legacy_SurroundSoundConfig_SurroundFormats(aidl.surroundSoundConfig));
     mSource = kAidlConfigSource;
+    if (aidl.engineConfig.capSpecificConfig.has_value()) {
+        setEngineLibraryNameSuffix(kCapEngineLibraryNameSuffix);
+    }
     // No need to augmentData() as AIDL HAL must provide correct mic addresses.
     return NO_ERROR;
 }
@@ -341,4 +345,9 @@
         {AUDIO_FORMAT_AC4, {}}};
 }
 
+bool AudioPolicyConfig::useDeepBufferForMedia() const {
+    if (mUseDeepBufferForMediaOverride.has_value()) return *mUseDeepBufferForMediaOverride;
+    return property_get_bool("audio.deep_buffer.media", false /* default_value */);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
index 82f51ad..164f70a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioProfileVectorHelper.cpp
@@ -190,6 +190,18 @@
     return BAD_VALUE;
 }
 
+status_t checkIdentical(const sp<AudioProfile> &audioProfile,
+                        uint32_t samplingRate,
+                        audio_channel_mask_t channelMask,
+                        audio_format_t format) {
+    if (audioProfile->getFormat() == format &&
+        audioProfile->supportsChannels(channelMask) &&
+        audioProfile->supportsRate(samplingRate)) {
+        return NO_ERROR;
+    }
+    return BAD_VALUE;
+}
+
 status_t checkCompatibleSamplingRate(const sp<AudioProfile> &audioProfile,
                                      uint32_t samplingRate,
                                      uint32_t &updatedSamplingRate)
@@ -320,23 +332,43 @@
     return bestMatch > 0 ? NO_ERROR : BAD_VALUE;
 }
 
-status_t checkExactProfile(const AudioProfileVector& audioProfileVector,
-                           const uint32_t samplingRate,
-                           audio_channel_mask_t channelMask,
-                           audio_format_t format)
-{
+namespace {
+
+status_t checkProfile(const AudioProfileVector& audioProfileVector,
+                      const uint32_t samplingRate,
+                      audio_channel_mask_t channelMask,
+                      audio_format_t format,
+                      std::function<status_t(const sp<AudioProfile> &, uint32_t,
+                                             audio_channel_mask_t, audio_format_t)> check) {
     if (audioProfileVector.empty()) {
         return NO_ERROR;
     }
 
     for (const auto& profile : audioProfileVector) {
-        if (checkExact(profile, samplingRate, channelMask, format) == NO_ERROR) {
+        if (check(profile, samplingRate, channelMask, format) == NO_ERROR) {
             return NO_ERROR;
         }
     }
     return BAD_VALUE;
 }
 
+} // namespace
+
+status_t checkExactProfile(const AudioProfileVector& audioProfileVector,
+                           const uint32_t samplingRate,
+                           audio_channel_mask_t channelMask,
+                           audio_format_t format)
+{
+    return checkProfile(audioProfileVector, samplingRate, channelMask, format, checkExact);
+}
+
+status_t checkIdenticalProfile(const AudioProfileVector &audioProfileVector,
+                               const uint32_t samplingRate,
+                               audio_channel_mask_t channelMask,
+                               audio_format_t format) {
+    return checkProfile(audioProfileVector, samplingRate, channelMask, format, checkIdentical);
+}
+
 status_t checkCompatibleProfile(const AudioProfileVector &audioProfileVector,
                                 uint32_t &samplingRate,
                                 audio_channel_mask_t &channelMask,
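The two public entry points now differ only in the per-profile predicate forwarded to the shared checkProfile() helper: checkExactProfile relies on audio_formats_match (which, per the header comment, treats linear PCM formats wider than two bytes per sample as equivalent), while checkIdenticalProfile requires the literal format. A hedged illustration, assuming profiles holds a single 48 kHz stereo AUDIO_FORMAT_PCM_32_BIT profile:

    // Illustration only, under the assumption stated above.
    const status_t exact = checkExactProfile(
            profiles, 48000, AUDIO_CHANNEL_OUT_STEREO, AUDIO_FORMAT_PCM_8_24_BIT);
    // exact == NO_ERROR: both formats are linear PCM wider than 16 bits, so they "match".

    const status_t identical = checkIdenticalProfile(
            profiles, 48000, AUDIO_CHANNEL_OUT_STEREO, AUDIO_FORMAT_PCM_8_24_BIT);
    // identical == BAD_VALUE: the stored format is PCM_32_BIT, not PCM_8_24_BIT.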
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 2aee501..ad6977b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -57,8 +57,8 @@
 void TrackClientDescriptor::dump(String8 *dst, int spaces) const
 {
     ClientDescriptor::dump(dst, spaces);
-    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d\n", spaces, "",
-            mStream, mFlags, mActivityCount);
+    dst->appendFormat("%*sStream: %d; Flags: %08x; Refcount: %d; InternalMute: %s\n",
+            spaces, "", mStream, mFlags, mActivityCount, mInternalMute ? "Yes" : "No");
     dst->appendFormat("%*sDAP Primary Mix: %p\n", spaces, "", mPrimaryMix.promote().get());
     if (!mSecondaryOutputs.empty()) {
         dst->appendFormat("%*sDAP Secondary Outputs: ", spaces - 2, "");
@@ -96,12 +96,14 @@
 SourceClientDescriptor::SourceClientDescriptor(audio_port_handle_t portId, uid_t uid,
          audio_attributes_t attributes, const struct audio_port_config &config,
          const sp<DeviceDescriptor>& srcDevice, audio_stream_type_t stream,
-         product_strategy_t strategy, VolumeSource volumeSource, bool isInternal) :
+         product_strategy_t strategy, VolumeSource volumeSource,
+         bool isInternal, bool isCallRx, bool isCallTx) :
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         {config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
         stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
         {} /* Sources do not support secondary outputs*/, nullptr),
-    mSrcDevice(srcDevice), mIsInternal(isInternal)
+    mSrcDevice(srcDevice), mIsInternal(isInternal),
+    mIsCallRx(isCallRx), mIsCallTx(isCallTx)
 {
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 9f7b8fc..46a04de 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -541,4 +541,14 @@
     return filteredDevices;
 }
 
+// Returns true only when every device in the vector is attached; an empty vector yields true.
+bool DeviceVector::areAllDevicesAttached() const
+{
+    for (const auto &device : *this) {
+        if (!device->isAttached()) {
+            return false;
+        }
+    }
+    return true;
+}
+
 } // namespace android
diff --git a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
index 7971b61..090da6c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/EffectDescriptor.cpp
@@ -210,27 +210,13 @@
     }
 }
 
-bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId) const
-{
+bool EffectDescriptorCollection::hasOrphansForSession(audio_session_t sessionId,
+                                                      const effect_uuid_t* effectType) const {
     for (size_t i = 0; i < size(); ++i) {
         sp<EffectDescriptor> effect = valueAt(i);
-        if (effect->mSession == sessionId && effect->mIsOrphan) {
-            return true;
-        }
-    }
-    return false;
-}
-
-bool EffectDescriptorCollection::hasOrphanEffectsForSessionAndType(
-        audio_session_t sessionId, const effect_uuid_t* effectType) const {
-    if (effectType == nullptr) {
-        return hasOrphansForSession(sessionId);
-    }
-
-    for (size_t i = 0; i < size(); ++i) {
-        sp<EffectDescriptor> effect = valueAt(i);
-        if (effect->mIsOrphan && effect->mSession == sessionId &&
-            memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0) {
+        if (effect->mSession == sessionId && effect->mIsOrphan &&
+            (effectType == nullptr ||
+             memcmp(&effect->mDesc.type, effectType, sizeof(effect_uuid_t)) == 0)) {
             return true;
         }
     }
diff --git a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
index 6696b45..2d8231a 100644
--- a/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/HwModule.cpp
@@ -283,6 +283,16 @@
     dumpAudioRouteVector(mRoutes, dst, spaces);
 }
 
+// Returns the HW module matching the given handle, or nullptr if no module matches.
+sp<HwModule> HwModuleCollection::getModuleFromHandle(audio_module_handle_t handle) const
+{
+    for (const auto& module : *this) {
+        if (module->getHandle() == handle) {
+            return module;
+        }
+    }
+    return nullptr;
+}
+
 sp <HwModule> HwModuleCollection::getModuleFromName(const char *name) const
 {
     for (const auto& module : *this) {
diff --git a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
index d9fbd89..991b103 100644
--- a/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/IOProfile.cpp
@@ -73,7 +73,7 @@
     if (isRecordThread)
     {
         if ((flags & AUDIO_INPUT_FLAG_MMAP_NOIRQ) != 0) {
-            if (checkExactAudioProfile(&config) != NO_ERROR) {
+            if (checkIdenticalAudioProfile(&config) != NO_ERROR) {
                 return result;
             }
             result = EXACT_MATCH;
@@ -86,7 +86,13 @@
             return result;
         }
     } else {
-        if (checkExactAudioProfile(&config) == NO_ERROR) {
+        if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0 ||
+            (flags & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != 0) {
+            if (checkIdenticalAudioProfile(&config) != NO_ERROR) {
+                return result;
+            }
+            result = EXACT_MATCH;
+        } else if (checkExactAudioProfile(&config) == NO_ERROR) {
             result = EXACT_MATCH;
         } else {
             return result;
diff --git a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
index ce8178f..cd54626 100644
--- a/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/PolicyAudioPort.cpp
@@ -20,6 +20,7 @@
 #include "PolicyAudioPort.h"
 #include "HwModule.h"
 #include <policy.h>
+#include <system/audio.h>
 
 #ifndef ARRAY_SIZE
 #define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))
@@ -63,21 +64,11 @@
 
 status_t PolicyAudioPort::checkExactAudioProfile(const struct audio_port_config *config) const
 {
-    status_t status = NO_ERROR;
-    auto config_mask = config->config_mask;
-    if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
-        config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
-        status = asAudioPort()->checkGain(&config->gain, config->gain.index);
-        if (status != NO_ERROR) {
-            return status;
-        }
-    }
-    if (config_mask != 0) {
-        // TODO should we check sample_rate / channel_mask / format separately?
-        status = checkExactProfile(asAudioPort()->getAudioProfiles(), config->sample_rate,
-                config->channel_mask, config->format);
-    }
-    return status;
+    return checkAudioProfile(config, checkExactProfile);
+}
+
+status_t PolicyAudioPort::checkIdenticalAudioProfile(const struct audio_port_config *config) const {
+    return checkAudioProfile(config, checkIdenticalProfile);
 }
 
 void PolicyAudioPort::pickSamplingRate(uint32_t &pickedRate,
@@ -266,4 +257,28 @@
             asAudioPort()->getName().c_str(), samplingRate, channelMask, format);
 }
 
+status_t PolicyAudioPort::checkAudioProfile(
+        const struct audio_port_config *config,
+        std::function<status_t(const AudioProfileVector &,
+                               const uint32_t,
+                               audio_channel_mask_t,
+                               audio_format_t)> checkProfile) const {
+    status_t status = NO_ERROR;
+    auto config_mask = config->config_mask;
+    if (config_mask & AUDIO_PORT_CONFIG_GAIN) {
+        config_mask &= ~AUDIO_PORT_CONFIG_GAIN;
+        status = asAudioPort()->checkGain(&config->gain, config->gain.index);
+        if (status != NO_ERROR) {
+            return status;
+        }
+    }
+    if (config_mask != 0) {
+        // TODO should we check sample_rate / channel_mask / format separately?
+        status = checkProfile(asAudioPort()->getAudioProfiles(), config->sample_rate,
+                   config->channel_mask, config->format);
+    }
+    return status;
+}
+
 } // namespace android
diff --git a/services/audiopolicy/engine/common/Android.bp b/services/audiopolicy/engine/common/Android.bp
index 9e07d79..7daa064 100644
--- a/services/audiopolicy/engine/common/Android.bp
+++ b/services/audiopolicy/engine/common/Android.bp
@@ -62,4 +62,8 @@
         "com.android.media.audio-aconfig-cc",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+        "latest_android_media_audio_common_types_cpp_static",
+    ],
 }
diff --git a/services/audiopolicy/engine/common/include/EngineBase.h b/services/audiopolicy/engine/common/include/EngineBase.h
index b9c94a4..edb2e29 100644
--- a/services/audiopolicy/engine/common/include/EngineBase.h
+++ b/services/audiopolicy/engine/common/include/EngineBase.h
@@ -129,6 +129,8 @@
 
     product_strategy_t getProductStrategyByName(const std::string &name) const;
 
+    std::string getProductStrategyName(product_strategy_t id) const;
+
     AudioPolicyManagerObserver *getApmObserver() const { return mApmObserver; }
 
     inline bool isInCall() const
diff --git a/services/audiopolicy/engine/common/include/ProductStrategy.h b/services/audiopolicy/engine/common/include/ProductStrategy.h
index 1593be0..8162720 100644
--- a/services/audiopolicy/engine/common/include/ProductStrategy.h
+++ b/services/audiopolicy/engine/common/include/ProductStrategy.h
@@ -46,7 +46,7 @@
     using VolumeGroupAttributesVector = std::vector<VolumeGroupAttributes>;
 
 public:
-    ProductStrategy(const std::string &name);
+    ProductStrategy(const std::string &name, int id = PRODUCT_STRATEGY_NONE);
 
     void addAttributes(const VolumeGroupAttributes &volumeGroupAttributes);
 
diff --git a/services/audiopolicy/engine/common/src/EngineBase.cpp b/services/audiopolicy/engine/common/src/EngineBase.cpp
index e259e6e..976791f 100644
--- a/services/audiopolicy/engine/common/src/EngineBase.cpp
+++ b/services/audiopolicy/engine/common/src/EngineBase.cpp
@@ -116,6 +116,15 @@
     return PRODUCT_STRATEGY_NONE;
 }
 
+// Returns the name of the product strategy matching the given id, or an empty string if unknown.
+std::string EngineBase::getProductStrategyName(product_strategy_t id) const {
+    for (const auto &iter : mProductStrategies) {
+        if (iter.second->getId() == id) {
+            return iter.second->getName();
+        }
+    }
+    return "";
+}
+
 engineConfig::ParsingResult EngineBase::loadAudioPolicyEngineConfig(
         const media::audio::common::AudioHalEngineConfig& aidlConfig)
 {
@@ -240,7 +249,7 @@
         loadVolumeConfig(mVolumeGroups, volumeConfig);
     }
     for (auto& strategyConfig : result.parsedConfig->productStrategies) {
-        sp<ProductStrategy> strategy = new ProductStrategy(strategyConfig.name);
+        sp<ProductStrategy> strategy = new ProductStrategy(strategyConfig.name, strategyConfig.id);
         for (const auto &group : strategyConfig.attributesGroups) {
             const auto &iter = std::find_if(begin(mVolumeGroups), end(mVolumeGroups),
                                          [&group](const auto &volumeGroup) {
diff --git a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
index 548a20d..c4bf64a 100644
--- a/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
+++ b/services/audiopolicy/engine/common/src/EngineDefaultConfig.h
@@ -18,14 +18,17 @@
 
 #include <EngineConfig.h>
 
+#include <media/AudioProductStrategy.h>
+#include <policy.h>
 #include <system/audio.h>
 
 namespace android {
+
 /**
  * @brief AudioProductStrategies hard coded array of strategies to fill new engine API contract.
  */
 const engineConfig::ProductStrategies gOrderedStrategies = {
-    {"STRATEGY_PHONE",
+    {"STRATEGY_PHONE", STRATEGY_PHONE,
      {
          {AUDIO_STREAM_VOICE_CALL, "AUDIO_STREAM_VOICE_CALL",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VOICE_COMMUNICATION, AUDIO_SOURCE_DEFAULT,
@@ -37,7 +40,7 @@
          }
      },
     },
-    {"STRATEGY_SONIFICATION",
+    {"STRATEGY_SONIFICATION", STRATEGY_SONIFICATION,
      {
          {AUDIO_STREAM_RING, "AUDIO_STREAM_RING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
@@ -49,7 +52,7 @@
          }
      },
     },
-    {"STRATEGY_ENFORCED_AUDIBLE",
+    {"STRATEGY_ENFORCED_AUDIBLE", STRATEGY_ENFORCED_AUDIBLE,
      {
          {AUDIO_STREAM_ENFORCED_AUDIBLE, "AUDIO_STREAM_ENFORCED_AUDIBLE",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
@@ -57,7 +60,7 @@
          }
      },
     },
-    {"STRATEGY_ACCESSIBILITY",
+    {"STRATEGY_ACCESSIBILITY", STRATEGY_ACCESSIBILITY,
      {
          {AUDIO_STREAM_ACCESSIBILITY, "AUDIO_STREAM_ACCESSIBILITY",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY,
@@ -65,7 +68,7 @@
          }
      },
     },
-    {"STRATEGY_SONIFICATION_RESPECTFUL",
+    {"STRATEGY_SONIFICATION_RESPECTFUL", STRATEGY_SONIFICATION_RESPECTFUL,
      {
          {AUDIO_STREAM_NOTIFICATION, "AUDIO_STREAM_NOTIFICATION",
           {
@@ -77,7 +80,7 @@
          }
      },
     },
-    {"STRATEGY_MEDIA",
+    {"STRATEGY_MEDIA", STRATEGY_MEDIA,
      {
          {AUDIO_STREAM_ASSISTANT, "AUDIO_STREAM_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_SPEECH, AUDIO_USAGE_ASSISTANT,
@@ -103,7 +106,7 @@
          }
      },
     },
-    {"STRATEGY_DTMF",
+    {"STRATEGY_DTMF", STRATEGY_DTMF,
      {
          {AUDIO_STREAM_DTMF, "AUDIO_STREAM_DTMF",
           {
@@ -113,7 +116,7 @@
          }
      },
     },
-    {"STRATEGY_CALL_ASSISTANT",
+    {"STRATEGY_CALL_ASSISTANT", STRATEGY_CALL_ASSISTANT,
      {
          {AUDIO_STREAM_CALL_ASSISTANT, "AUDIO_STREAM_CALL_ASSISTANT",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_CALL_ASSISTANT, AUDIO_SOURCE_DEFAULT,
@@ -121,7 +124,7 @@
          }
      },
     },
-    {"STRATEGY_TRANSMITTED_THROUGH_SPEAKER",
+    {"STRATEGY_TRANSMITTED_THROUGH_SPEAKER", STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
      {
          {AUDIO_STREAM_TTS, "AUDIO_STREAM_TTS",
           {
@@ -140,7 +143,7 @@
  * For compatibility reasons with the APM volume config file, the volume group name is the stream type.
  */
 const engineConfig::ProductStrategies gOrderedSystemStrategies = {
-    {"STRATEGY_REROUTING",
+    {"STRATEGY_REROUTING", STRATEGY_REROUTING,
      {
          {AUDIO_STREAM_REROUTING, "AUDIO_STREAM_REROUTING",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_VIRTUAL_SOURCE, AUDIO_SOURCE_DEFAULT,
@@ -148,7 +151,7 @@
          }
      },
     },
-    {"STRATEGY_PATCH",
+    {"STRATEGY_PATCH", STRATEGY_PATCH,
      {
          {AUDIO_STREAM_PATCH, "AUDIO_STREAM_PATCH",
           {{AUDIO_CONTENT_TYPE_UNKNOWN, AUDIO_USAGE_UNKNOWN, AUDIO_SOURCE_DEFAULT,
diff --git a/services/audiopolicy/engine/common/src/ProductStrategy.cpp b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
index 0d25955..8ed7403 100644
--- a/services/audiopolicy/engine/common/src/ProductStrategy.cpp
+++ b/services/audiopolicy/engine/common/src/ProductStrategy.cpp
@@ -19,6 +19,7 @@
 
 #include "ProductStrategy.h"
 
+#include <android/media/audio/common/AudioHalProductStrategy.h>
 #include <media/AudioProductStrategy.h>
 #include <media/TypeConverter.h>
 #include <utils/String8.h>
@@ -30,11 +31,20 @@
 
 namespace android {
 
-ProductStrategy::ProductStrategy(const std::string &name) :
+using media::audio::common::AudioHalProductStrategy;
+
+/*
+ * Note on the id: either it is provided (legacy strategies have hard-coded ids, AIDL
+ * strategies carry their own ids, required to start from VENDOR_STRATEGY_ID_START), or
+ * it is generated. The REROUTING & PATCH system strategies are added without an id, so
+ * to prevent collisions the generated ids also start from VENDOR_STRATEGY_ID_START.
+ */
+ProductStrategy::ProductStrategy(const std::string &name, int id) :
     mName(name),
-    mId(static_cast<product_strategy_t>(HandleGenerator<uint32_t>::getNextHandle()))
-{
-}
+    mId((static_cast<product_strategy_t>(id) != PRODUCT_STRATEGY_NONE) ?
+            static_cast<product_strategy_t>(id) :
+            static_cast<product_strategy_t>(AudioHalProductStrategy::VENDOR_STRATEGY_ID_START +
+                    HandleGenerator<uint32_t>::getNextHandle())) {}
 
 void ProductStrategy::addAttributes(const VolumeGroupAttributes &volumeGroupAttributes)
 {
diff --git a/services/audiopolicy/engine/config/Android.bp b/services/audiopolicy/engine/config/Android.bp
index ab2c134..d771605 100644
--- a/services/audiopolicy/engine/config/Android.bp
+++ b/services/audiopolicy/engine/config/Android.bp
@@ -10,6 +10,9 @@
 
 cc_library {
     name: "libaudiopolicyengine_config",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
     export_include_dirs: ["include"],
     include_dirs: [
         "external/libxml2/include",
diff --git a/services/audiopolicy/engine/config/include/EngineConfig.h b/services/audiopolicy/engine/config/include/EngineConfig.h
index 119dbd6..054bdae 100644
--- a/services/audiopolicy/engine/config/include/EngineConfig.h
+++ b/services/audiopolicy/engine/config/include/EngineConfig.h
@@ -71,6 +71,7 @@
 
 struct ProductStrategy {
     std::string name;
+    int id;
     AttributesGroups attributesGroups;
 };
 
diff --git a/services/audiopolicy/engine/config/src/EngineConfig.cpp b/services/audiopolicy/engine/config/src/EngineConfig.cpp
index ca78ce7..714ab78 100644
--- a/services/audiopolicy/engine/config/src/EngineConfig.cpp
+++ b/services/audiopolicy/engine/config/src/EngineConfig.cpp
@@ -20,8 +20,8 @@
 #include <sstream>
 #include <stdarg.h>
 #include <string>
-#include <string>
 #include <vector>
+#include <unordered_map>
 
 #define LOG_TAG "APM::AudioPolicyEngine/Config"
 //#define LOG_NDEBUG 0
@@ -41,6 +41,7 @@
 
 namespace android {
 
+using media::audio::common::AudioStreamType;
 using utilities::convertTo;
 
 namespace engineConfig {
@@ -51,11 +52,39 @@
 
 namespace {
 
+// Maps an AIDL product strategy id to the corresponding legacy "STRATEGY_*" name;
+// vendor-defined ids (>= VENDOR_STRATEGY_ID_START) are rendered as their numeric value.
+ConversionResult<std::string> aidl2legacy_AudioHalProductStrategy_ProductStrategyType(int id) {
+    using AudioProductStrategyType = media::audio::common::AudioProductStrategyType;
+
+#define STRATEGY_ENTRY(name) {static_cast<int>(AudioProductStrategyType::name), "STRATEGY_" #name}
+    static const std::unordered_map<int, std::string> productStrategyMap = {STRATEGY_ENTRY(MEDIA),
+                            STRATEGY_ENTRY(PHONE),
+                            STRATEGY_ENTRY(SONIFICATION),
+                            STRATEGY_ENTRY(SONIFICATION_RESPECTFUL),
+                            STRATEGY_ENTRY(DTMF),
+                            STRATEGY_ENTRY(ENFORCED_AUDIBLE),
+                            STRATEGY_ENTRY(TRANSMITTED_THROUGH_SPEAKER),
+                            STRATEGY_ENTRY(ACCESSIBILITY)};
+#undef STRATEGY_ENTRY
+
+    if (id >= media::audio::common::AudioHalProductStrategy::VENDOR_STRATEGY_ID_START) {
+        return std::to_string(id);
+    }
+    auto it = productStrategyMap.find(id);
+    if (it == productStrategyMap.end()) {
+        return base::unexpected(BAD_VALUE);
+    }
+    return it->second;
+}
+
 ConversionResult<AttributesGroup> aidl2legacy_AudioHalAttributeGroup_AttributesGroup(
         const media::audio::common::AudioHalAttributesGroup& aidl) {
     AttributesGroup legacy;
-    legacy.stream = VALUE_OR_RETURN(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType));
+    // StreamType may only be set to AudioStreamType.INVALID when using the
+    // Configurable Audio Policy (CAP) engine. An AudioHalAttributesGroup with
+    // AudioStreamType.INVALID is used when the volume group and attributes are
+    // not associated with any AudioStreamType.
+    legacy.stream = ((aidl.streamType == AudioStreamType::INVALID) ? AUDIO_STREAM_DEFAULT :
+            VALUE_OR_RETURN(aidl2legacy_AudioStreamType_audio_stream_type_t(aidl.streamType)));
     legacy.volumeGroup = aidl.volumeGroupName;
     legacy.attributesVect = VALUE_OR_RETURN(convertContainer<AttributesVector>(
                     aidl.attributes, aidl2legacy_AudioAttributes_audio_attributes_t));
@@ -65,7 +94,9 @@
 ConversionResult<ProductStrategy> aidl2legacy_AudioHalProductStrategy_ProductStrategy(
         const media::audio::common::AudioHalProductStrategy& aidl) {
     ProductStrategy legacy;
-    legacy.name = "strategy_" + std::to_string(aidl.id);
+    legacy.name = aidl.name.value_or(VALUE_OR_RETURN(
+                    aidl2legacy_AudioHalProductStrategy_ProductStrategyType(aidl.id)));
+    legacy.id = aidl.id;
     legacy.attributesGroups = VALUE_OR_RETURN(convertContainer<AttributesGroups>(
                     aidl.attributesGroups,
                     aidl2legacy_AudioHalAttributeGroup_AttributesGroup));
@@ -125,7 +156,6 @@
                     aidl.volumeCurves, aidl2legacy_AudioHalVolumeCurve_VolumeCurve));
     return legacy;
 }
-
 }  // namespace
 
 template<typename E, typename C>
@@ -152,6 +182,7 @@
 
     struct Attributes {
         static constexpr const char *name = "name";
+        static constexpr const char *id = "id";
     };
     static android::status_t deserialize(_xmlDoc *doc, const _xmlNode *root, Collection &ps);
 };
@@ -510,13 +541,21 @@
         ALOGE("ProductStrategyTraits No attribute %s found", Attributes::name);
         return BAD_VALUE;
     }
+    int id = PRODUCT_STRATEGY_NONE;
+    std::string idLiteral = getXmlAttribute(child, Attributes::id);
+    if (!idLiteral.empty()) {
+        if (!convertTo(idLiteral, id)) {
+            return BAD_VALUE;
+        }
+        ALOGV("%s: %s, %s = %d", __FUNCTION__, name.c_str(), Attributes::id, id);
+    }
     ALOGV("%s: %s = %s", __FUNCTION__, Attributes::name, name.c_str());
 
     size_t skipped = 0;
     AttributesGroups attrGroups;
     deserializeCollection<AttributesGroupTraits>(doc, child, attrGroups, skipped);
 
-    strategies.push_back({name, attrGroups});
+    strategies.push_back({name, id, attrGroups});
     return NO_ERROR;
 }
 
diff --git a/services/audiopolicy/engineconfigurable/Android.bp b/services/audiopolicy/engineconfigurable/Android.bp
index 66df930..1c98faf 100644
--- a/services/audiopolicy/engineconfigurable/Android.bp
+++ b/services/audiopolicy/engineconfigurable/Android.bp
@@ -36,10 +36,10 @@
         "libbase_headers",
     ],
     static_libs: [
+        "libaudiopolicycapengine_config",
         "libaudiopolicyengine_common",
         "libaudiopolicyengine_config",
         "libaudiopolicyengineconfigurable_pfwwrapper",
-
     ],
     shared_libs: [
         "libaudio_aidl_conversion_common_cpp",
@@ -54,4 +54,15 @@
         "libutils",
         "libxml2",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+    required: [
+        "CapClass.xml",
+        "CapProductStrategies.xml",
+        "CapSubsystem-CommonTypes.xml",
+        "CapSubsystem.xml",
+        "ParameterFrameworkConfigurationCap.xml",
+    ],
 }
diff --git a/services/audiopolicy/engineconfigurable/config/Android.bp b/services/audiopolicy/engineconfigurable/config/Android.bp
index 8dd13e8..95a7cf8 100644
--- a/services/audiopolicy/engineconfigurable/config/Android.bp
+++ b/services/audiopolicy/engineconfigurable/config/Android.bp
@@ -38,6 +38,53 @@
 }
 
 filegroup {
+    name: "audio_policy_engine_aidl_criterion_types_template",
+    srcs: ["example/common/audio_policy_engine_criterion_types_aidl.xml.in"],
+}
+
+filegroup {
     name: "audio_policy_engine_criteria",
     srcs: ["example/common/audio_policy_engine_criteria.xml"],
 }
+
+cc_library_headers {
+    name: "libaudiopolicycapengine_config_headers",
+    export_include_dirs: ["include"],
+}
+
+cc_library {
+    name: "libaudiopolicycapengine_config",
+    defaults: [
+        "latest_android_media_audio_common_types_cpp_shared",
+    ],
+    export_header_lib_headers: [
+        "libaudiopolicycapengine_config_headers",
+    ],
+    include_dirs: [
+        "external/libxml2/include",
+    ],
+    srcs: [
+        "src/CapEngineConfig.cpp",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wextra",
+    ],
+    shared_libs: [
+        "libaudio_aidl_conversion_common_cpp",
+        "libaudiopolicycomponents",
+        "libaudiopolicyengine_config",
+        "libcutils",
+        "liblog",
+        "libmedia_helper",
+        "libutils",
+        "libxml2",
+    ],
+    header_libs: [
+        "libaudio_system_headers",
+        "libaudioclient_headers",
+        "libaudiopolicycapengine_config_headers",
+        "libmedia_headers",
+    ],
+}
diff --git a/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in b/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in
new file mode 100644
index 0000000..dc2517b
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/config/example/common/audio_policy_engine_criterion_types_aidl.xml.in
@@ -0,0 +1,96 @@
+<?xml version='1.0' encoding='UTF-8'?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<criterion_types>
+    <criterion_type name="OutputDevicesMaskType" type="inclusive"/>
+    <criterion_type name="InputDevicesMaskType" type="inclusive"/>
+    <criterion_type name="OutputDevicesAddressesType" type="inclusive">
+        <values>
+            <!-- legacy remote submix -->
+            <value literal="0" numerical="1"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="InputDevicesAddressesType" type="inclusive">
+        <values>
+            <!-- legacy remote submix -->
+            <value literal="0" numerical="1"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="AndroidModeType" type="exclusive"/>
+    <criterion_type name="ForceUseForCommunicationType" type="exclusive">
+        <values>
+            <value literal="NONE" numerical="0"/>
+            <value literal="SPEAKER" numerical="1"/>
+            <value literal="BT_SCO" numerical="3"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForMediaType" type="exclusive">
+        <values>
+            <value literal="NONE" numerical="0"/>
+            <value literal="SPEAKER" numerical="1"/>
+            <value literal="HEADPHONES" numerical="2"/>
+            <value literal="BT_A2DP" numerical="4"/>
+            <value literal="WIRED_ACCESSORY" numerical="5"/>
+            <value literal="ANALOG_DOCK" numerical="8"/>
+            <value literal="DIGITAL_DOCK" numerical="9"/>
+            <value literal="NO_BT_A2DP" numerical="10"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForRecordType" type="exclusive">
+        <values>
+            <value literal="NONE" numerical="0"/>
+            <value literal="BT_SCO" numerical="3"/>
+            <value literal="WIRED_ACCESSORY" numerical="5"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForDockType" type="exclusive">
+        <values>
+            <value literal="NONE" numerical="0"/>
+            <value literal="WIRED_ACCESSORY" numerical="5"/>
+            <value literal="BT_CAR_DOCK" numerical="6"/>
+            <value literal="BT_DESK_DOCK" numerical="7"/>
+            <value literal="ANALOG_DOCK" numerical="8"/>
+            <value literal="DIGITAL_DOCK" numerical="9"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForSystemType" type="exclusive" >
+        <values>
+            <value literal="NONE" numerical="0"/>
+            <value literal="SYSTEM_ENFORCED" numerical="11"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForHdmiSystemAudioType" type="exclusive">
+        <values>
+            <value literal="NONE" numerical="0"/>
+            <value literal="HDMI_SYSTEM_AUDIO_ENFORCED" numerical="12"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForEncodedSurroundType" type="exclusive">
+        <values>
+            <value literal="NONE" numerical="0"/>
+            <value literal="ENCODED_SURROUND_NEVER" numerical="13"/>
+            <value literal="ENCODED_SURROUND_ALWAYS" numerical="14"/>
+            <value literal="ENCODED_SURROUND_MANUAL" numerical="15"/>
+        </values>
+    </criterion_type>
+    <criterion_type name="ForceUseForVibrateRingingType" type="exclusive">
+        <values>
+            <value literal="NONE" numerical="0"/>
+            <value literal="BT_SCO" numerical="3"/>
+        </values>
+    </criterion_type>
+</criterion_types>
+
+
diff --git a/services/audiopolicy/engineconfigurable/config/include/CapEngineConfig.h b/services/audiopolicy/engineconfigurable/config/include/CapEngineConfig.h
new file mode 100644
index 0000000..6c55a49
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/config/include/CapEngineConfig.h
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <EngineConfig.h>
+
+#include <android/media/audio/common/AudioHalEngineConfig.h>
+#include <system/audio_policy.h>
+#include <string>
+#include <vector>
+
+namespace android {
+namespace capEngineConfig {
+
+static const char *const gCriterionTypeSuffix = "Type";
+static const char *const gInputDeviceCriterionName = "AvailableInputDevices";
+static const char *const gOutputDeviceCriterionName = "AvailableOutputDevices";
+static const char *const gPhoneStateCriterionName = "TelephonyMode";
+static const char *const gOutputDeviceAddressCriterionName = "AvailableOutputDevicesAddresses";
+static const char *const gInputDeviceAddressCriterionName = "AvailableInputDevicesAddresses";
+
+/**
+* Order MUST be aligned with definition of audio_policy_force_use_t within audio_policy.h
+*/
+static const char *const gForceUseCriterionTag[AUDIO_POLICY_FORCE_USE_CNT] =
+{
+    [AUDIO_POLICY_FORCE_FOR_COMMUNICATION] =        "ForceUseForCommunication",
+    [AUDIO_POLICY_FORCE_FOR_MEDIA] =                "ForceUseForMedia",
+    [AUDIO_POLICY_FORCE_FOR_RECORD] =               "ForceUseForRecord",
+    [AUDIO_POLICY_FORCE_FOR_DOCK] =                 "ForceUseForDock",
+    [AUDIO_POLICY_FORCE_FOR_SYSTEM] =               "ForceUseForSystem",
+    [AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO] =    "ForceUseForHdmiSystemAudio",
+    [AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND] =     "ForceUseForEncodedSurround",
+    [AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING] =      "ForceUseForVibrateRinging"
+};
+
+using ParameterValues = std::vector<std::string>;
+
+struct ConfigurableElement {
+    std::string path;
+};
+
+struct ConfigurableElementValue {
+    ConfigurableElement configurableElement;
+    std::string value;
+};
+using ConfigurableElementValues = std::vector<ConfigurableElementValue>;
+
+struct CapSetting {
+    std::string configurationName;
+    ConfigurableElementValues configurableElementValues;
+};
+using CapSettings = std::vector<CapSetting>;
+
+struct CapConfiguration {
+    std::string name;
+    std::string rule;
+};
+
+using ConfigurableElementPaths = std::vector<std::string>;
+using CapConfigurations = std::vector<CapConfiguration>;
+
+struct CapConfigurableDomain {
+    std::string name;
+    CapConfigurations configurations;
+    CapSettings settings;
+};
+
+struct CapCriterion {
+    engineConfig::Criterion criterion;
+    engineConfig::CriterionType criterionType;
+};
+
+using CapCriteria = std::vector<CapCriterion>;
+using CapConfigurableDomains = std::vector<CapConfigurableDomain>;
+
+struct CapConfig {
+    CapCriteria capCriteria;
+    CapConfigurableDomains capConfigurableDomains;
+};
+
+/** Result of the AIDL conversion performed by `convert()` */
+struct ParsingResult {
+    /** Parsed config, nullptr if the AIDL configuration could not be converted */
+    std::unique_ptr<CapConfig> parsedConfig;
+    size_t nbSkippedElement; //< Number of skipped (empty) configurable domains
+};
+
+/** Converts the provided AIDL engine configuration into CAP criteria and settings.
+ * @return the conversion result, @see ParsingResult
+ */
+ParsingResult convert(const ::android::media::audio::common::AudioHalEngineConfig& aidlConfig);
+
+} // namespace capEngineConfig
+} // namespace android
diff --git a/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
new file mode 100644
index 0000000..a1b4470
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/config/src/CapEngineConfig.cpp
@@ -0,0 +1,500 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <cinttypes>
+#include <cstdint>
+#include <istream>
+#include <map>
+#include <sstream>
+#include <stdarg.h>
+#include <string>
+#include <vector>
+
+#define LOG_TAG "APM::AudioPolicyEngine/CapConfig"
+//#define LOG_NDEBUG 0
+
+#include "CapEngineConfig.h"
+#include <TypeConverter.h>
+#include <Volume.h>
+#include <cutils/properties.h>
+#include <media/AidlConversion.h>
+#include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionUtil.h>
+#include <media/TypeConverter.h>
+#include <media/convert.h>
+#include <system/audio_config.h>
+#include <utils/Log.h>
+
+namespace android {
+
+using utilities::convertTo;
+using media::audio::common::AudioDeviceAddress;
+using media::audio::common::AudioDeviceDescription;
+using media::audio::common::AudioHalCapCriterion;
+using media::audio::common::AudioHalCapParameter;
+using media::audio::common::AudioHalCapRule;
+using media::audio::common::AudioSource;
+using media::audio::common::AudioStreamType;
+using media::audio::common::AudioHalCapCriterionV2;
+using ::android::base::unexpected;
+
+namespace capEngineConfig {
+
+static constexpr const char *gLegacyOutputDevicePrefix = "AUDIO_DEVICE_OUT_";
+static constexpr const char *gLegacyInputDevicePrefix = "AUDIO_DEVICE_IN_";
+static constexpr const char *gLegacyStreamPrefix = "AUDIO_STREAM_";
+static constexpr const char *gLegacySourcePrefix = "AUDIO_SOURCE_";
+static constexpr const char *gPolicyParamPrefix = "/Policy/policy/";
+
+namespace {
+
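+// Strips the given prefix from a legacy enum name and lower-cases the remainder,
+// e.g. "AUDIO_DEVICE_OUT_SPEAKER" with prefix "AUDIO_DEVICE_OUT_" becomes "speaker".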
+ConversionResult<std::string> truncatePrefixToLower(const std::string& legacyName,
+                                                    const std::string& legacyPrefix) {
+    std::size_t pos = legacyName.find(legacyPrefix);
+    if (pos == std::string::npos) {
+        return unexpected(BAD_VALUE);
+    }
+    std::string capName = legacyName.substr(pos + legacyPrefix.length());
+    std::transform(capName.begin(), capName.end(), capName.begin(),
+                   [](unsigned char c) { return std::tolower(c); });
+    return capName;
+}
+
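+// Same as truncatePrefixToLower, but the original case is preserved.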
+ConversionResult<std::string> truncatePrefix(const std::string& name,  const std::string& prefix) {
+    std::size_t pos = name.find(prefix);
+    if (pos == std::string::npos) {
+        return unexpected(BAD_VALUE);
+    }
+    std::string capName = name.substr(pos + prefix.length());
+    return capName;
+}
+
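+// Maps an AIDL criterion to the parameter-framework criterion name used by the CAP engine.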
+ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2_CapName(
+        const AudioHalCapCriterionV2& aidl) {
+    switch (aidl.getTag()) {
+        case AudioHalCapCriterionV2::availableInputDevices:
+            return gInputDeviceCriterionName;
+        case AudioHalCapCriterionV2::availableOutputDevices:
+            return gOutputDeviceCriterionName;
+        case AudioHalCapCriterionV2::availableInputDevicesAddresses:
+            return gInputDeviceAddressCriterionName;
+        case AudioHalCapCriterionV2::availableOutputDevicesAddresses:
+            return gOutputDeviceAddressCriterionName;
+        case AudioHalCapCriterionV2::telephonyMode:
+            return gPhoneStateCriterionName;
+        case AudioHalCapCriterionV2::forceConfigForUse: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::forceConfigForUse>();
+            return gForceUseCriterionTag[VALUE_OR_RETURN(
+                    aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(
+                            aidlCriterion.forceUse))];
+        }
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
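+// Converts an AIDL device description into the CAP criterion value, i.e. the legacy device
+// name stripped of its AUDIO_DEVICE_OUT_/AUDIO_DEVICE_IN_ prefix.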
+ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
+        const AudioDeviceDescription& aidl) {
+    audio_devices_t legacyDeviceType = VALUE_OR_RETURN(
+            aidl2legacy_AudioDeviceDescription_audio_devices_t(aidl));
+    bool isOut = audio_is_output_devices(legacyDeviceType);
+    std::string legacyTypeLiteral;
+    if (!::android::DeviceConverter::toString(legacyDeviceType, legacyTypeLiteral)) {
+        ALOGE("%s Invalid strategy device type %d", __func__, legacyDeviceType);
+        return unexpected(BAD_VALUE);
+    }
+    return truncatePrefix(legacyTypeLiteral,
+            isOut ? gLegacyOutputDevicePrefix : gLegacyInputDevicePrefix);
+}
+
+ConversionResult<std::string> aidl2legacy_AudioHalCapCriterionV2Type_CapCriterionValue(
+        const AudioHalCapCriterionV2::Type& aidl) {
+    switch (aidl.getTag()) {
+        case AudioHalCapCriterionV2::Type::availableDevicesType:
+            return aidl2legacy_AudioHalCapCriterionV2TypeDevice_CapCriterionValue(
+                    aidl.get<AudioHalCapCriterionV2::Type::availableDevicesType>());
+        case AudioHalCapCriterionV2::Type::availableDevicesAddressesType:
+            return aidl.get<AudioHalCapCriterionV2::Type::availableDevicesAddressesType>().template
+                    get<AudioDeviceAddress::id>();
+        case AudioHalCapCriterionV2::Type::telephonyModeType:
+            return toString(aidl.get<AudioHalCapCriterionV2::Type::telephonyModeType>());
+        case AudioHalCapCriterionV2::Type::forcedConfigType:
+            return toString(aidl.get<AudioHalCapCriterionV2::Type::forcedConfigType>());
+        default:
+            return unexpected(BAD_VALUE);
+    }
+}
+
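+// Serializes an AudioHalCapRule into the parameter-framework rule syntax, for example
+// "All{AvailableOutputDevices Includes SPEAKER}"; nested rules are emitted recursively.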
+ConversionResult<std::string> aidl2legacy_AudioHalCapRule_CapRule(
+        const AudioHalCapRule& aidlRule) {
+    std::string rule;
+    switch (aidlRule.compoundRule) {
+        case AudioHalCapRule::CompoundRule::ANY:
+            rule += "Any";
+            break;
+        case AudioHalCapRule::CompoundRule::ALL:
+            rule += "All";
+            break;
+        default:
+            return unexpected(BAD_VALUE);
+    }
+    rule += "{";
+    if (!aidlRule.nestedRules.empty()) {
+        for (const auto& nestedRule: aidlRule.nestedRules) {
+            rule += VALUE_OR_FATAL(aidl2legacy_AudioHalCapRule_CapRule(nestedRule));
+        }
+        if (!aidlRule.criterionRules.empty()) {
+            rule += ",";
+        }
+    }
+    bool isFirstCriterionRule = true;
+    for (const auto& criterionRule: aidlRule.criterionRules) {
+        if (!isFirstCriterionRule) {
+            rule += ",";
+        }
+        isFirstCriterionRule = false;
+        std::string selectionCriterion = VALUE_OR_RETURN(
+                aidl2legacy_AudioHalCapCriterionV2_CapName(criterionRule.criterion));
+        std::string matchesWhen;
+        std::string value = VALUE_OR_RETURN(
+                aidl2legacy_AudioHalCapCriterionV2Type_CapCriterionValue(
+                        criterionRule.criterionTypeValue));
+
+        switch (criterionRule.matchingRule) {
+            case AudioHalCapRule::MatchingRule::IS:
+                matchesWhen = "Is";
+                break;
+            case AudioHalCapRule::MatchingRule::IS_NOT:
+                matchesWhen = "IsNot";
+                break;
+            case AudioHalCapRule::MatchingRule::INCLUDES:
+                matchesWhen = "Includes";
+                break;
+            case AudioHalCapRule::MatchingRule::EXCLUDES:
+                matchesWhen = "Excludes";
+                break;
+            default:
+                return unexpected(BAD_VALUE);
+        }
+        rule += selectionCriterion + " " + matchesWhen + " " + value;
+    }
+    rule += "}";
+    return rule;
+}
+
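+// Pairs an AIDL configuration name with its serialized rule string.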
+ConversionResult<CapConfiguration> aidl2legacy_AudioHalCapConfiguration_CapConfiguration(
+        const media::audio::common::AudioHalCapConfiguration& aidl) {
+    CapConfiguration legacy;
+    legacy.name = aidl.name;
+    legacy.rule = VALUE_OR_FATAL(aidl2legacy_AudioHalCapRule_CapRule(aidl.rule));
+    return legacy;
+}
+
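+// Translates an AIDL CAP parameter into a configurable element path and value rooted at
+// /Policy/policy/ (strategy output devices, strategy device address, input sources,
+// stream volume profiles).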
+ConversionResult<ConfigurableElementValue> aidl2legacy_ParameterSetting_ConfigurableElementValue(
+        const AudioHalCapParameter& aidl) {
+    ConfigurableElementValue legacy;
+    std::string literalValue;
+    switch (aidl.getTag()) {
+        case AudioHalCapParameter::selectedStrategyDevice: {
+            auto strategyDevice = aidl.get<AudioHalCapParameter::selectedStrategyDevice>();
+            literalValue = std::to_string(strategyDevice.isSelected);
+            audio_devices_t legacyType = VALUE_OR_RETURN(
+                    aidl2legacy_AudioDeviceDescription_audio_devices_t(strategyDevice.device));
+            std::string legacyTypeLiteral;
+            if (!::android::OutputDeviceConverter::toString(legacyType, legacyTypeLiteral)) {
+                ALOGE("%s Invalid device type %d", __func__, legacyType);
+                return unexpected(BAD_VALUE);
+            }
+            std::string deviceLiteral = VALUE_OR_RETURN(
+                    truncatePrefixToLower(legacyTypeLiteral, gLegacyOutputDevicePrefix));
+            if (deviceLiteral == "default") {
+                deviceLiteral = "stub";
+            }
+            legacy.configurableElement.path = std::string(gPolicyParamPrefix)
+                    + "product_strategies/vx_" + std::to_string(strategyDevice.id)
+                    + "/selected_output_devices/mask/" + deviceLiteral;
+            break;
+        }
+        case AudioHalCapParameter::strategyDeviceAddress: {
+            auto strategyAddress = aidl.get<AudioHalCapParameter::strategyDeviceAddress>();
+            legacy.configurableElement.path = std::string(gPolicyParamPrefix)
+                    + "product_strategies/vx_" + std::to_string(strategyAddress.id)
+                    + "/device_address";
+            literalValue = strategyAddress.deviceAddress.get<AudioDeviceAddress::id>();
+            break;
+        }
+        case AudioHalCapParameter::selectedInputSourceDevice: {
+            auto inputSourceDevice = aidl.get<AudioHalCapParameter::selectedInputSourceDevice>();
+            literalValue = std::to_string(inputSourceDevice.isSelected);
+            audio_devices_t legacyType = VALUE_OR_RETURN(
+                    aidl2legacy_AudioDeviceDescription_audio_devices_t(inputSourceDevice.device));
+            std::string legacyTypeLiteral;
+            if (!::android::InputDeviceConverter::toString(legacyType, legacyTypeLiteral)) {
+                ALOGE("%s Invalid input source device type %d", __func__, legacyType);
+                return unexpected(BAD_VALUE);
+            }
+            std::string deviceLiteral = VALUE_OR_RETURN(
+                    truncatePrefixToLower(legacyTypeLiteral, gLegacyInputDevicePrefix));
+            if (deviceLiteral == "default") {
+                deviceLiteral = "stub";
+            }
+            audio_source_t legacySource = VALUE_OR_RETURN(aidl2legacy_AudioSource_audio_source_t(
+                    inputSourceDevice.inputSource));
+            std::string inputSourceLiteral;
+            if (!::android::SourceTypeConverter::toString(legacySource, inputSourceLiteral)) {
+                ALOGE("%s Invalid input source  %d", __func__, legacySource);
+                return unexpected(BAD_VALUE);
+            }
+            inputSourceLiteral = VALUE_OR_RETURN(
+                    truncatePrefixToLower(inputSourceLiteral, gLegacySourcePrefix));
+            legacy.configurableElement.path = std::string(gPolicyParamPrefix) + "input_sources/"
+                    + inputSourceLiteral + "/applicable_input_device/mask/" + deviceLiteral;
+            break;
+        }
+        case AudioHalCapParameter::streamVolumeProfile: {
+            auto streamVolumeProfile = aidl.get<AudioHalCapParameter::streamVolumeProfile>();
+            audio_stream_type_t legacyStreamType = VALUE_OR_RETURN(
+                    aidl2legacy_AudioStreamType_audio_stream_type_t(streamVolumeProfile.stream));
+            std::string legacyStreamLiteral;
+            if (!::android::StreamTypeConverter::toString(legacyStreamType, legacyStreamLiteral)) {
+                ALOGE("%s Invalid stream type  %d", __func__, legacyStreamType);
+                return unexpected(BAD_VALUE);
+            }
+            legacyStreamLiteral = VALUE_OR_RETURN(
+                    truncatePrefixToLower(legacyStreamLiteral, gLegacyStreamPrefix));
+
+            audio_stream_type_t legacyProfile = VALUE_OR_RETURN(
+                    aidl2legacy_AudioStreamType_audio_stream_type_t(streamVolumeProfile.profile));
+            std::string legacyProfileLiteral;
+            if (!::android::StreamTypeConverter::toString(legacyProfile, legacyProfileLiteral)) {
+                ALOGE("%s Invalid profile %d", __func__, legacyProfile);
+                return unexpected(BAD_VALUE);
+            }
+            literalValue = VALUE_OR_RETURN(
+                    truncatePrefixToLower(legacyProfileLiteral, gLegacyStreamPrefix));
+            legacy.configurableElement.path = std::string(gPolicyParamPrefix) + "streams/"
+                    + legacyStreamLiteral + "/applicable_volume_profile/volume_profile";
+            break;
+        }
+        default:
+            return unexpected(BAD_VALUE);
+    }
+    legacy.value = literalValue;
+    return legacy;
+}
+
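+// Extracts the parameter settings of an AIDL CAP configuration as a named CapSetting.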
+ConversionResult<CapSetting> aidl2legacy_AudioHalCapConfiguration_CapSetting(
+        const media::audio::common::AudioHalCapConfiguration& aidl) {
+    CapSetting legacy;
+    legacy.configurationName = aidl.name;
+    legacy.configurableElementValues = VALUE_OR_RETURN(convertContainer<ConfigurableElementValues>(
+            aidl.parameterSettings, aidl2legacy_ParameterSetting_ConfigurableElementValue));
+    return legacy;
+}
+
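+// Converts an AIDL CAP domain into its legacy counterpart: configurations plus their settings.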
+ConversionResult<CapConfigurableDomain> aidl2legacy_AudioHalCapDomain_CapConfigurableDomain(
+        const media::audio::common::AudioHalCapDomain& aidl) {
+    CapConfigurableDomain legacy;
+    legacy.name = aidl.name;
+    legacy.configurations = VALUE_OR_RETURN(convertContainer<CapConfigurations>(
+            aidl.configurations,
+            aidl2legacy_AudioHalCapConfiguration_CapConfiguration));
+    legacy.settings = VALUE_OR_RETURN(convertContainer<CapSettings>(
+            aidl.configurations,
+            aidl2legacy_AudioHalCapConfiguration_CapSetting));
+    return legacy;
+}
+
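+// Builds the legacy criterion and criterion type (name, inclusiveness, value pairs) for each
+// supported AIDL criterion kind: devices, device addresses, telephony mode and force use.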
+ConversionResult<CapCriterion> aidl2legacy_AudioHalCapCriterionV2_Criterion(
+            const AudioHalCapCriterionV2& aidl) {
+    CapCriterion capCriterion;
+    engineConfig::Criterion& criterion = capCriterion.criterion;
+    engineConfig::CriterionType& criterionType = capCriterion.criterionType;
+
+    auto loadForceUseCriterion = [](const auto &aidlCriterion, auto &criterion,
+            auto &criterionType) -> status_t {
+        uint32_t legacyForceUse = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(
+                        aidlCriterion.forceUse));
+        criterion.name = gForceUseCriterionTag[legacyForceUse];
+        criterionType.name = criterion.name + gCriterionTypeSuffix;
+        criterionType.isInclusive =
+                (aidlCriterion.logic == AudioHalCapCriterionV2::LogicalDisjunction::INCLUSIVE);
+        criterion.typeName = criterionType.name;
+        criterion.defaultLiteralValue = toString(aidlCriterion.defaultValue);
+        if (aidlCriterion.values.empty()) {
+            return BAD_VALUE;
+        }
+        for (auto &value : aidlCriterion.values) {
+            uint32_t legacyForcedConfig = VALUE_OR_RETURN_STATUS(
+                    aidl2legacy_AudioPolicyForcedConfig_audio_policy_forced_cfg_t(value));
+            criterionType.valuePairs.push_back({legacyForcedConfig, 0, toString(value)});
+        }
+        return NO_ERROR;
+    };
+
+    auto loadDevicesCriterion = [](const auto &aidlCriterion, auto &criterion,
+            auto &criterionType) -> status_t {
+        criterionType.name = criterion.name + gCriterionTypeSuffix;
+        criterionType.isInclusive =
+                (aidlCriterion.logic == AudioHalCapCriterionV2::LogicalDisjunction::INCLUSIVE);
+        criterion.typeName = criterionType.name;
+        int shift = 0;
+        if (aidlCriterion.values.empty()) {
+            return BAD_VALUE;
+        }
+        for (const auto &value : aidlCriterion.values) {
+            audio_devices_t legacyDeviceType = VALUE_OR_RETURN_STATUS(
+                    aidl2legacy_AudioDeviceDescription_audio_devices_t(value));
+            bool isOut = audio_is_output_devices(legacyDeviceType);
+            std::string legacyTypeLiteral;
+            if (!::android::DeviceConverter::toString(legacyDeviceType, legacyTypeLiteral)) {
+                ALOGE("%s Invalid device type %d", __func__, legacyDeviceType);
+                return BAD_VALUE;
+            }
+            std::string deviceLiteral = VALUE_OR_RETURN_STATUS(truncatePrefix(legacyTypeLiteral,
+                    isOut ? gLegacyOutputDevicePrefix : gLegacyInputDevicePrefix));
+            uint64_t pfwCriterionValue = 1 << shift++;
+            criterionType.valuePairs.push_back(
+                    {pfwCriterionValue, static_cast<int32_t>(legacyDeviceType), deviceLiteral});
+            ALOGV("%s: adding %" PRIu64 " %d %s %s", __func__, pfwCriterionValue, legacyDeviceType,
+                    toString(value.type).c_str(), deviceLiteral.c_str());
+        }
+        return NO_ERROR;
+    };
+
+    auto loadDeviceAddressesCriterion = [](const auto &aidlCriterion, auto &criterion,
+            auto &criterionType) -> status_t {
+        criterionType.name = criterion.name + gCriterionTypeSuffix;
+        criterionType.isInclusive =
+                (aidlCriterion.logic == AudioHalCapCriterionV2::LogicalDisjunction::INCLUSIVE);
+        criterion.typeName = criterionType.name;
+        int shift = 0;
+        for (auto &value : aidlCriterion.values) {
+            uint64_t pfwCriterionValue = 1 << shift++;
+            if (value.getTag() != AudioDeviceAddress::id) {
+                return BAD_VALUE;
+            }
+            std::string address = value.template get<AudioDeviceAddress::id>();
+            criterionType.valuePairs.push_back({pfwCriterionValue, 0, address});
+        }
+        return NO_ERROR;
+    };
+
+    switch (aidl.getTag()) {
+        case AudioHalCapCriterionV2::availableInputDevices: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::availableInputDevices>();
+            criterion.name = gInputDeviceCriterionName;
+            if (loadDevicesCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::availableOutputDevices: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::availableOutputDevices>();
+            criterion.name = gOutputDeviceCriterionName;
+            if (loadDevicesCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::availableInputDevicesAddresses: {
+            auto aidlCriterion =
+                    aidl.get<AudioHalCapCriterionV2::availableInputDevicesAddresses>();
+            criterion.name = gInputDeviceAddressCriterionName;
+            if (loadDeviceAddressesCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::availableOutputDevicesAddresses: {
+            auto aidlCriterion =
+                    aidl.get<AudioHalCapCriterionV2::availableOutputDevicesAddresses>();
+            criterion.name = gOutputDeviceAddressCriterionName;
+            if (loadDeviceAddressesCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::telephonyMode: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::telephonyMode>();
+            criterion.name = gPhoneStateCriterionName;
+            criterionType.name = criterion.name + gCriterionTypeSuffix;
+            criterionType.isInclusive =
+                    (aidlCriterion.logic == AudioHalCapCriterionV2::LogicalDisjunction::INCLUSIVE);
+            criterion.typeName = criterionType.name;
+            criterion.defaultLiteralValue = toString(aidlCriterion.defaultValue);
+            if (aidlCriterion.values.empty()) {
+                return unexpected(BAD_VALUE);
+            }
+            for (auto &value : aidlCriterion.values) {
+                uint32_t legacyMode =
+                        VALUE_OR_RETURN(aidl2legacy_AudioMode_audio_mode_t(value));
+                criterionType.valuePairs.push_back({legacyMode, 0, toString(value)});
+            }
+            break;
+        }
+        case AudioHalCapCriterionV2::forceConfigForUse: {
+            auto aidlCriterion = aidl.get<AudioHalCapCriterionV2::forceConfigForUse>();
+            if (loadForceUseCriterion(aidlCriterion, criterion, criterionType) != NO_ERROR) {
+                return unexpected(BAD_VALUE);
+            }
+            break;
+        }
+        default:
+            return unexpected(BAD_VALUE);
+    }
+    return capCriterion;
+}
+
+}  // namespace
+
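+// Entry point: converts the AIDL CAP-specific engine config into legacy criteria and
+// configurable domains. Returns an empty result when the CAP config is absent or a
+// conversion fails; empty domains are counted as skipped elements.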
+ParsingResult convert(const ::android::media::audio::common::AudioHalEngineConfig& aidlConfig) {
+    auto config = std::make_unique<capEngineConfig::CapConfig>();
+
+    if (!aidlConfig.capSpecificConfig.has_value() ||
+            !aidlConfig.capSpecificConfig.value().domains.has_value()) {
+        ALOGE("%s: no Cap Engine config", __func__);
+        return ParsingResult{};
+    }
+    for (auto& aidlCriteria: aidlConfig.capSpecificConfig.value().criteriaV2.value()) {
+        if (aidlCriteria.has_value()) {
+            if (auto conv = aidl2legacy_AudioHalCapCriterionV2_Criterion(aidlCriteria.value());
+                    conv.ok()) {
+                config->capCriteria.push_back(std::move(conv.value()));
+            } else {
+                return ParsingResult{};
+            }
+        }
+    }
+    size_t skippedElement = 0;
+    for (auto& aidlDomain: aidlConfig.capSpecificConfig.value().domains.value()) {
+        if (aidlDomain.has_value()) {
+            if (auto conv = aidl2legacy_AudioHalCapDomain_CapConfigurableDomain(aidlDomain.value());
+                    conv.ok()) {
+                config->capConfigurableDomains.push_back(std::move(conv.value()));
+            } else {
+                return ParsingResult{};
+            }
+        } else {
+            skippedElement += 1;
+        }
+    }
+    return {.parsedConfig=std::move(config), .nbSkippedElement=skippedElement};
+}
+} // namespace capEngineConfig
+} // namespace android
diff --git a/services/audiopolicy/engineconfigurable/data/Android.bp b/services/audiopolicy/engineconfigurable/data/Android.bp
new file mode 100644
index 0000000..303cabc
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/Android.bp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+prebuilt_etc {
+    name: "CapClass.xml",
+    src: ":CapClass",
+    sub_dir: "parameter-framework/Structure/Policy",
+}
+
+prebuilt_etc {
+    name: "CapSubsystem.xml",
+    src: ":CapSubsystem",
+    sub_dir: "parameter-framework/Structure/Policy",
+}
+
+prebuilt_etc {
+    name: "CapSubsystem-CommonTypes.xml",
+    src: ":buildaidlcommontypesstructure_gen",
+    sub_dir: "parameter-framework/Structure/Policy",
+}
+
+prebuilt_etc {
+    name: "CapProductStrategies.xml",
+    src: ":cap_product_strategies_structure",
+    sub_dir: "parameter-framework/Structure/Policy",
+}
+
+prebuilt_etc {
+    name: "ParameterFrameworkConfigurationCap.xml",
+    src: ":ParameterFrameworkConfigurationCapSrc_gen",
+    sub_dir: "parameter-framework",
+}
+
+genrule {
+    name: "ParameterFrameworkConfigurationCapSrc_gen",
+    out: ["ParameterFrameworkConfigurationCap.xml"],
+    srcs: [":ParameterFrameworkConfigurationCapSrc"],
+    product_variables: {
+        debuggable: {
+            cmd: "sed -e 's|TuningAllowed=\"false\"|TuningAllowed=\"true\" ServerPort=\"unix:///dev/socket/audioserver/policy_debug\"|g' <$(in) > $(out)",
+        },
+    },
+    cmd: "cp -f $(in) $(out)",
+}
+
+genrule {
+    name: "buildaidlcommontypesstructure_gen",
+    defaults: ["buildcommontypesstructurerule"],
+    out: ["CapSubsystem-CommonTypes.xml"],
+}
+
+filegroup {
+    name: "ParameterFrameworkConfigurationCapSrc",
+    srcs: ["etc/ParameterFrameworkConfigurationCap.xml"],
+}
+
+filegroup {
+    name: "cap_product_strategies_structure",
+    srcs: ["etc/Structure/CapProductStrategies.xml"],
+}
+
+filegroup {
+    name: "CapSubsystem",
+    srcs: ["etc/Structure/CapSubsystem.xml"],
+}
+
+filegroup {
+    name: "aidl_common_types_structure_template",
+    srcs: ["etc/Structure/CapSubsystem-CommonTypes.xml.in"],
+}
+
+filegroup {
+    name: "CapClass",
+    srcs: ["etc/Structure/CapClass.xml"],
+}
diff --git a/services/audiopolicy/engineconfigurable/data/etc/ParameterFrameworkConfigurationCap.xml b/services/audiopolicy/engineconfigurable/data/etc/ParameterFrameworkConfigurationCap.xml
new file mode 100644
index 0000000..bac7a25
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/ParameterFrameworkConfigurationCap.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ParameterFrameworkConfiguration xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    SystemClassName="Policy" TuningAllowed="false">
+
+    <SubsystemPlugins>
+        <Location Folder="">
+            <Plugin Name="libpolicy-subsystem.so"/>
+        </Location>
+    </SubsystemPlugins>
+    <StructureDescriptionFileLocation Path="Structure/Policy/CapClass.xml"/>
+</ParameterFrameworkConfiguration>
diff --git a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapClass.xml b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapClass.xml
new file mode 100644
index 0000000..e233673
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapClass.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<SystemClass xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+    xsi:noNamespaceSchemaLocation="../../Schemas/SystemClass.xsd" Name="Policy">
+    <SubsystemInclude Path="CapSubsystem.xml"/>
+</SystemClass>
diff --git a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml
new file mode 100644
index 0000000..61f056a
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapProductStrategies.xml
@@ -0,0 +1,63 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<ComponentTypeSet xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+              xmlns:xi="http://www.w3.org/2001/XInclude"
+              xsi:noNamespaceSchemaLocation="Schemas/ComponentTypeSet.xsd">
+  <!-- This structure is expected to be in the system partition and provisions the maximum
+       number of product strategies allowed to be used by the vendor. -->
+  <ComponentType Name="ProductStrategies" Description="">
+    <Component Name="vx_1000" Type="ProductStrategy" Mapping="Identifier:1000,Name:vx_1000"/>
+    <Component Name="vx_1001" Type="ProductStrategy" Mapping="Identifier:1001,Name:vx_1001"/>
+    <Component Name="vx_1002" Type="ProductStrategy" Mapping="Identifier:1002,Name:vx_1002"/>
+    <Component Name="vx_1003" Type="ProductStrategy" Mapping="Identifier:1003,Name:vx_1003"/>
+    <Component Name="vx_1004" Type="ProductStrategy" Mapping="Identifier:1004,Name:vx_1004"/>
+    <Component Name="vx_1005" Type="ProductStrategy" Mapping="Identifier:1005,Name:vx_1005"/>
+    <Component Name="vx_1006" Type="ProductStrategy" Mapping="Identifier:1006,Name:vx_1006"/>
+    <Component Name="vx_1007" Type="ProductStrategy" Mapping="Identifier:1007,Name:vx_1007"/>
+    <Component Name="vx_1008" Type="ProductStrategy" Mapping="Identifier:1008,Name:vx_1008"/>
+    <Component Name="vx_1009" Type="ProductStrategy" Mapping="Identifier:1009,Name:vx_1009"/>
+    <Component Name="vx_1010" Type="ProductStrategy" Mapping="Identifier:1010,Name:vx_1010"/>
+    <Component Name="vx_1011" Type="ProductStrategy" Mapping="Identifier:1011,Name:vx_1011"/>
+    <Component Name="vx_1012" Type="ProductStrategy" Mapping="Identifier:1012,Name:vx_1012"/>
+    <Component Name="vx_1013" Type="ProductStrategy" Mapping="Identifier:1013,Name:vx_1013"/>
+    <Component Name="vx_1014" Type="ProductStrategy" Mapping="Identifier:1014,Name:vx_1014"/>
+    <Component Name="vx_1015" Type="ProductStrategy" Mapping="Identifier:1015,Name:vx_1015"/>
+    <Component Name="vx_1016" Type="ProductStrategy" Mapping="Identifier:1016,Name:vx_1016"/>
+    <Component Name="vx_1017" Type="ProductStrategy" Mapping="Identifier:1017,Name:vx_1017"/>
+    <Component Name="vx_1018" Type="ProductStrategy" Mapping="Identifier:1018,Name:vx_1018"/>
+    <Component Name="vx_1019" Type="ProductStrategy" Mapping="Identifier:1019,Name:vx_1019"/>
+    <Component Name="vx_1020" Type="ProductStrategy" Mapping="Identifier:1020,Name:vx_1020"/>
+    <Component Name="vx_1021" Type="ProductStrategy" Mapping="Identifier:1021,Name:vx_1021"/>
+    <Component Name="vx_1022" Type="ProductStrategy" Mapping="Identifier:1022,Name:vx_1022"/>
+    <Component Name="vx_1023" Type="ProductStrategy" Mapping="Identifier:1023,Name:vx_1023"/>
+    <Component Name="vx_1024" Type="ProductStrategy" Mapping="Identifier:1024,Name:vx_1024"/>
+    <Component Name="vx_1025" Type="ProductStrategy" Mapping="Identifier:1025,Name:vx_1025"/>
+    <Component Name="vx_1026" Type="ProductStrategy" Mapping="Identifier:1026,Name:vx_1026"/>
+    <Component Name="vx_1027" Type="ProductStrategy" Mapping="Identifier:1027,Name:vx_1027"/>
+    <Component Name="vx_1028" Type="ProductStrategy" Mapping="Identifier:1028,Name:vx_1028"/>
+    <Component Name="vx_1029" Type="ProductStrategy" Mapping="Identifier:1029,Name:vx_1029"/>
+    <Component Name="vx_1030" Type="ProductStrategy" Mapping="Identifier:1030,Name:vx_1030"/>
+    <Component Name="vx_1031" Type="ProductStrategy" Mapping="Identifier:1031,Name:vx_1031"/>
+    <Component Name="vx_1032" Type="ProductStrategy" Mapping="Identifier:1032,Name:vx_1032"/>
+    <Component Name="vx_1033" Type="ProductStrategy" Mapping="Identifier:1033,Name:vx_1033"/>
+    <Component Name="vx_1034" Type="ProductStrategy" Mapping="Identifier:1034,Name:vx_1034"/>
+    <Component Name="vx_1035" Type="ProductStrategy" Mapping="Identifier:1035,Name:vx_1035"/>
+    <Component Name="vx_1036" Type="ProductStrategy" Mapping="Identifier:1036,Name:vx_1036"/>
+    <Component Name="vx_1037" Type="ProductStrategy" Mapping="Identifier:1037,Name:vx_1037"/>
+    <Component Name="vx_1038" Type="ProductStrategy" Mapping="Identifier:1038,Name:vx_1038"/>
+    <Component Name="vx_1039" Type="ProductStrategy" Mapping="Identifier:1039,Name:vx_1039"/>
+  </ComponentType>
+</ComponentTypeSet>
diff --git a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem-CommonTypes.xml.in b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem-CommonTypes.xml.in
new file mode 100644
index 0000000..2c4c7b5
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem-CommonTypes.xml.in
@@ -0,0 +1,60 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<ComponentTypeSet xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+              xmlns:xi="http://www.w3.org/2001/XInclude"
+              xsi:noNamespaceSchemaLocation="Schemas/ComponentTypeSet.xsd">
+    <!-- Output devices definition as a bitfield for the supported devices per output
+    profile. It must match the output device enum parameter.
+    -->
+     <!--#################### GLOBAL COMPONENTS BEGIN ####################-->
+
+     <!--#################### GLOBAL COMPONENTS END ####################-->
+
+    <!-- Automatically filled from audio-base.h file -->
+    <ComponentType Name="OutputDevicesMask" Description="64bit representation of devices">
+        <BitParameterBlock Name="mask" Size="64">
+        </BitParameterBlock>
+    </ComponentType>
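+    <!-- Illustrative note (an assumption, not part of the generated output): the genrule that
+         consumes this template is expected to expand the empty BitParameterBlock above with one
+         BitParameter per output device taken from audio-base.h, e.g.
+             <BitParameter Name="earpiece" Size="1" Pos="0"/>
+             <BitParameter Name="speaker" Size="1" Pos="1"/>
+         Names and bit positions are examples only; the generated CapSubsystem-CommonTypes.xml
+         is authoritative. -->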
+
+    <!-- Input devices definition as a bitfield for the supported devices per input
+    profile. It must match the input device enum parameter.
+    -->
+    <!-- Automatically filled from audio-base.h file -->
+    <ComponentType Name="InputDevicesMask" Description="64bit representation of devices">
+        <BitParameterBlock Name="mask" Size="64">
+        </BitParameterBlock>
+    </ComponentType>
+
+    <!--#################### STREAM COMMON TYPES BEGIN ####################-->
+    <!-- Automatically filled from audio-base.h file. VolumeProfileType is associated to stream type -->
+    <ComponentType Name="VolumeProfileType">
+        <EnumParameter Name="volume_profile" Size="32">
+        </EnumParameter>
+    </ComponentType>
+
+    <ComponentType Name="Stream"  Mapping="Stream">
+        <Component Name="applicable_volume_profile" Type="VolumeProfileType"
+                   Description="Volume profile followed by a given stream type."/>
+    </ComponentType>
+
+    <!--#################### STREAM COMMON TYPES END ####################-->
+
+    <!--#################### INPUT SOURCE COMMON TYPES BEGIN ####################-->
+
+    <ComponentType Name="InputSource">
+        <Component Name="applicable_input_device" Type="InputDevicesMask"
+                   Mapping="InputSource" Description="Selected Input device"/>
+    </ComponentType>
+
+    <!--#################### INPUT SOURCE COMMON TYPES END ####################-->
+
+    <!--#################### PRODUCT STRATEGY COMMON TYPES BEGIN ####################-->
+
+    <ComponentType Name="ProductStrategy" Mapping="ProductStrategy">
+        <Component Name="selected_output_devices" Type="OutputDevicesMask"/>
+        <StringParameter Name="device_address" MaxLength="256"
+                         Description="if any, device address associated"/>
+    </ComponentType>
+
+    <!--#################### PRODUCT STRATEGY COMMON TYPES END ####################-->
+
+</ComponentTypeSet>
diff --git a/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem.xml b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem.xml
new file mode 100644
index 0000000..45a0bd4
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/data/etc/Structure/CapSubsystem.xml
@@ -0,0 +1,93 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2018 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<Subsystem xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+           xmlns:xi="http://www.w3.org/2001/XInclude"
+           xsi:noNamespaceSchemaLocation="Schemas/Subsystem.xsd"
+           Name="policy" Type="Policy">
+
+    <ComponentLibrary>
+        <!--#################### GLOBAL COMPONENTS BEGIN ####################-->
+        <!-- Common Types definition -->
+        <xi:include href="CapSubsystem-CommonTypes.xml"/>
+        <xi:include href="CapProductStrategies.xml"/>
+
+
+        <!--#################### GLOBAL COMPONENTS END ####################-->
+
+        <!--#################### STREAM BEGIN ####################-->
+
+        <ComponentType Name="Streams" Description="associated to audio_stream_type_t definition">
+            <Component Name="voice_call" Type="Stream" Mapping="Name:AUDIO_STREAM_VOICE_CALL"/>
+            <Component Name="system" Type="Stream" Mapping="Name:AUDIO_STREAM_SYSTEM"/>
+            <Component Name="ring" Type="Stream" Mapping="Name:AUDIO_STREAM_RING"/>
+            <Component Name="music" Type="Stream" Mapping="Name:AUDIO_STREAM_MUSIC"/>
+            <Component Name="alarm" Type="Stream" Mapping="Name:AUDIO_STREAM_ALARM"/>
+            <Component Name="notification" Type="Stream" Mapping="Name:AUDIO_STREAM_NOTIFICATION"/>
+            <Component Name="bluetooth_sco" Type="Stream" Mapping="Name:AUDIO_STREAM_BLUETOOTH_SCO"/>
+            <Component Name="enforced_audible" Type="Stream" Mapping="Name:AUDIO_STREAM_ENFORCED_AUDIBLE"
+                       Description="Sounds that cannot be muted by user and must be routed to speaker"/>
+            <Component Name="dtmf" Type="Stream" Mapping="Name:AUDIO_STREAM_DTMF"/>
+            <Component Name="tts" Type="Stream" Mapping="Name:AUDIO_STREAM_TTS"
+                             Description="Transmitted Through Speaker. Plays over speaker only, silent on other devices"/>
+            <Component Name="accessibility" Type="Stream" Mapping="Name:AUDIO_STREAM_ACCESSIBILITY"
+                             Description="For accessibility talk back prompts"/>
+            <Component Name="assistant" Type="Stream" Mapping="Name:AUDIO_STREAM_ASSISTANT"
+                             Description="used by a virtual assistant like Google Assistant, Bixby, etc."/>
+            <Component Name="rerouting" Type="Stream" Mapping="Name:AUDIO_STREAM_REROUTING"
+                             Description="For dynamic policy output mixes"/>
+            <Component Name="patch" Type="Stream" Mapping="Name:AUDIO_STREAM_PATCH"
+                             Description="For internal audio flinger tracks. Fixed volume"/>
+        </ComponentType>
+
+        <!--#################### STREAM END ####################-->
+
+        <!--#################### INPUT SOURCE BEGIN ####################-->
+
+        <ComponentType Name="InputSources" Description="associated to audio_source_t definition,
+                             identifier mapping must match the value of the enum">
+            <Component Name="default" Type="InputSource" Mapping="Name:AUDIO_SOURCE_DEFAULT"/>
+            <Component Name="mic" Type="InputSource" Mapping="Name:AUDIO_SOURCE_MIC"/>
+            <Component Name="voice_uplink" Type="InputSource"
+                                           Mapping="Name:AUDIO_SOURCE_VOICE_UPLINK"/>
+            <Component Name="voice_downlink" Type="InputSource"
+                                             Mapping="Name:AUDIO_SOURCE_VOICE_DOWNLINK"/>
+            <Component Name="voice_call" Type="InputSource"
+                                         Mapping="Name:AUDIO_SOURCE_VOICE_CALL"/>
+            <Component Name="camcorder" Type="InputSource" Mapping="Name:AUDIO_SOURCE_CAMCORDER"/>
+            <Component Name="voice_recognition" Type="InputSource"
+                                                Mapping="Name:AUDIO_SOURCE_VOICE_RECOGNITION"/>
+            <Component Name="voice_communication" Type="InputSource"
+                                                  Mapping="Name:AUDIO_SOURCE_VOICE_COMMUNICATION"/>
+            <Component Name="remote_submix" Type="InputSource"
+                                            Mapping="Name:AUDIO_SOURCE_REMOTE_SUBMIX"/>
+            <Component Name="unprocessed" Type="InputSource"
+                                            Mapping="Name:AUDIO_SOURCE_UNPROCESSED"/>
+            <Component Name="voice_performance" Type="InputSource"
+                                            Mapping="Name:AUDIO_SOURCE_VOICE_PERFORMANCE"/>
+            <Component Name="echo_reference" Type="InputSource"
+                                            Mapping="Name:AUDIO_SOURCE_ECHO_REFERENCE"/>
+            <Component Name="fm_tuner" Type="InputSource" Mapping="Name:AUDIO_SOURCE_FM_TUNER"/>
+            <Component Name="hotword" Type="InputSource" Mapping="Name:AUDIO_SOURCE_HOTWORD"/>
+        </ComponentType>
+        <!--#################### INPUT SOURCE END ####################-->
+    </ComponentLibrary>
+
+    <InstanceDefinition>
+        <Component Name="streams" Type="Streams"/>
+        <Component Name="input_sources" Type="InputSources"/>
+        <Component Name="product_strategies" Type="ProductStrategies"/>
+    </InstanceDefinition>
+</Subsystem>
diff --git a/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h b/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
index 9fd8b8e..e0b7210 100644
--- a/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
+++ b/services/audiopolicy/engineconfigurable/interface/AudioPolicyPluginInterface.h
@@ -98,7 +98,8 @@
     virtual bool setDeviceTypesForProductStrategy(product_strategy_t strategy,
                                                   uint64_t devices) = 0;
 
-    virtual product_strategy_t getProductStrategyByName(const std::string &address) = 0;
+    virtual product_strategy_t getProductStrategyByName(const std::string &name) = 0;
+    virtual std::string getProductStrategyName(product_strategy_t id) const = 0;
 
 protected:
     virtual ~AudioPolicyPluginInterface() {}
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
index 7fe111f..e4f44d5 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/Android.bp
@@ -50,6 +50,7 @@
 genrule {
     name: "buildcommontypesstructure_gen",
     defaults: ["buildcommontypesstructurerule"],
+    out: ["PolicySubsystem-CommonTypes.xml"],
 }
 
 filegroup {
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
index 3dc2229..c9a77a4 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/Android.bp
@@ -38,6 +38,8 @@
     shared_libs: [
         "libaudiopolicycomponents",
         "libaudiopolicyengineconfigurable",
+        "libbase",
+        "libcutils",
         "liblog",
         "libmedia_helper",
         "libparameter",
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
index 8bd7f66..bf5767d 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/PolicySubsystem.cpp
@@ -72,7 +72,7 @@
         );
     addSubsystemObjectFactory(
         new TSubsystemObjectFactory<ProductStrategy>(
-            mProductStrategyComponentName, (1 << MappingKeyName))
+            mProductStrategyComponentName, (1 << MappingKeyName) | (1 << MappingKeyIdentifier))
         );
 }
 
diff --git a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp
index ebd9456..06efbf28 100644
--- a/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp
+++ b/services/audiopolicy/engineconfigurable/parameter-framework/plugin/ProductStrategy.cpp
@@ -18,10 +18,7 @@
 #include "PolicyMappingKeys.h"
 #include "PolicySubsystem.h"
 
-using std::string;
-using android::product_strategy_t;
-
-ProductStrategy::ProductStrategy(const string &mappingValue,
+ProductStrategy::ProductStrategy(const std::string &mappingValue,
                    CInstanceConfigurableElement *instanceConfigurableElement,
                    const CMappingContext &context,
                    core::log::Logger& logger)
@@ -30,26 +27,32 @@
                                 mappingValue,
                                 MappingKeyAmend1,
                                 (MappingKeyAmendEnd - MappingKeyAmend1 + 1),
-                                context)
-{
-    std::string name(context.getItem(MappingKeyName));
+                                context) {
+
+    size_t id = context.getItemAsInteger(MappingKeyIdentifier);
+    std::string nameFromStructure(context.getItem(MappingKeyName));
 
     ALOG_ASSERT(instanceConfigurableElement != nullptr, "Invalid Configurable Element");
     mPolicySubsystem = static_cast<const PolicySubsystem *>(
-                instanceConfigurableElement->getBelongingSubsystem());
+            instanceConfigurableElement->getBelongingSubsystem());
     ALOG_ASSERT(mPolicySubsystem != nullptr, "Invalid Policy Subsystem");
 
     mPolicyPluginInterface = mPolicySubsystem->getPolicyPluginInterface();
     ALOG_ASSERT(mPolicyPluginInterface != nullptr, "Invalid Policy Plugin Interface");
 
-    mId = mPolicyPluginInterface->getProductStrategyByName(name);
+    mId = static_cast<android::product_strategy_t>(id);
+    std::string name = mPolicyPluginInterface->getProductStrategyName(mId);
+    if (name.empty()) {
+        name = nameFromStructure;
+        mId = mPolicyPluginInterface->getProductStrategyByName(name);
+    }
 
     ALOG_ASSERT(mId != PRODUCT_STRATEGY_INVALID, "Product Strategy %s not found", name.c_str());
 
     ALOGE("Product Strategy %s added", name.c_str());
 }
 
-bool ProductStrategy::sendToHW(string & /*error*/)
+bool ProductStrategy::sendToHW(std::string & /*error*/)
 {
     Device deviceParams;
     blackboardRead(&deviceParams, sizeof(deviceParams));
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.cpp b/services/audiopolicy/engineconfigurable/src/Engine.cpp
index ccd4316..45da7b0 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.cpp
+++ b/services/audiopolicy/engineconfigurable/src/Engine.cpp
@@ -71,27 +71,75 @@
 }
 
 status_t Engine::loadFromHalConfigWithFallback(
-        const media::audio::common::AudioHalEngineConfig& config __unused) {
-    // b/242678729. Need to implement for the configurable engine.
-    return INVALID_OPERATION;
-}
+        const media::audio::common::AudioHalEngineConfig& aidlConfig) {
 
-status_t Engine::loadFromXmlConfigWithFallback(const std::string& xmlFilePath)
-{
-    status_t loadResult = loadAudioPolicyEngineConfig(xmlFilePath);
-    if (loadResult < 0) {
-        ALOGE("Policy Engine configuration is invalid.");
+    auto capResult = capEngineConfig::convert(aidlConfig);
+    if (capResult.parsedConfig == nullptr) {
+        ALOGE("%s CapEngine Config invalid", __func__);
+        return BAD_VALUE;
     }
-    return loadResult;
-}
-
-status_t Engine::initCheck()
-{
+    status_t ret = loadWithFallback(aidlConfig);
+    if (ret != NO_ERROR) {
+        return ret;
+    }
+    auto loadCriteria= [this](const auto& capCriteria) {
+        for (auto& capCriterion : capCriteria) {
+            mPolicyParameterMgr->addCriterion(capCriterion.criterion.name,
+                    capCriterion.criterionType.isInclusive,
+                    capCriterion.criterionType.valuePairs,
+                    capCriterion.criterion.defaultLiteralValue);
+        }
+    };
+    loadCriteria(capResult.parsedConfig->capCriteria);
     std::string error;
     if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start(error) != NO_ERROR) {
         ALOGE("%s: could not start Policy PFW: %s", __FUNCTION__, error.c_str());
         return NO_INIT;
     }
+    return mPolicyParameterMgr->setConfiguration(capResult);
+}
+
+status_t Engine::loadFromXmlConfigWithFallback(const std::string& xmlFilePath)
+{
+    status_t status = loadWithFallback(xmlFilePath);
+    std::string error;
+    if (mPolicyParameterMgr == nullptr || mPolicyParameterMgr->start(error) != NO_ERROR) {
+        ALOGE("%s: could not start Policy PFW: %s", __FUNCTION__, error.c_str());
+        return NO_INIT;
+    }
+    return status;
+}
+
+template<typename T>
+status_t Engine::loadWithFallback(const T& configSource) {
+    auto result = EngineBase::loadAudioPolicyEngineConfig(configSource);
+    ALOGE_IF(result.nbSkippedElement != 0,
+             "Policy Engine configuration is partially invalid, skipped %zu elements",
+             result.nbSkippedElement);
+
+    auto loadCriteria= [this](const auto& configCriteria, const auto& configCriterionTypes) {
+        for (auto& criterion : configCriteria) {
+            engineConfig::CriterionType criterionType;
+            for (auto &configCriterionType : configCriterionTypes) {
+                if (configCriterionType.name == criterion.typeName) {
+                    criterionType = configCriterionType;
+                    break;
+                }
+            }
+            ALOG_ASSERT(not criterionType.name.empty(), "Invalid criterion type for %s",
+                        criterion.name.c_str());
+            mPolicyParameterMgr->addCriterion(criterion.name, criterionType.isInclusive,
+                                              criterionType.valuePairs,
+                                              criterion.defaultLiteralValue);
+        }
+    };
+
+    loadCriteria(result.parsedConfig->criteria, result.parsedConfig->criterionTypes);
+    return result.nbSkippedElement == 0? NO_ERROR : BAD_VALUE;
+}
+
+status_t Engine::initCheck()
+{
     return EngineBase::initCheck();
 }
 
@@ -199,32 +247,6 @@
     return EngineBase::setDeviceConnectionState(device, state);
 }
 
-status_t Engine::loadAudioPolicyEngineConfig(const std::string& xmlFilePath)
-{
-    auto result = EngineBase::loadAudioPolicyEngineConfig(xmlFilePath);
-
-    // Custom XML Parsing
-    auto loadCriteria= [this](const auto& configCriteria, const auto& configCriterionTypes) {
-        for (auto& criterion : configCriteria) {
-            engineConfig::CriterionType criterionType;
-            for (auto &configCriterionType : configCriterionTypes) {
-                if (configCriterionType.name == criterion.typeName) {
-                    criterionType = configCriterionType;
-                    break;
-                }
-            }
-            ALOG_ASSERT(not criterionType.name.empty(), "Invalid criterion type for %s",
-                        criterion.name.c_str());
-            mPolicyParameterMgr->addCriterion(criterion.name, criterionType.isInclusive,
-                                              criterionType.valuePairs,
-                                              criterion.defaultLiteralValue);
-        }
-    };
-
-    loadCriteria(result.parsedConfig->criteria, result.parsedConfig->criterionTypes);
-    return result.nbSkippedElement == 0? NO_ERROR : BAD_VALUE;
-}
-
 status_t Engine::setDevicesRoleForStrategy(product_strategy_t strategy, device_role_t role,
                                            const AudioDeviceTypeAddrVector &devices)
 {
diff --git a/services/audiopolicy/engineconfigurable/src/Engine.h b/services/audiopolicy/engineconfigurable/src/Engine.h
index 4f3e620..d9ebbe7 100644
--- a/services/audiopolicy/engineconfigurable/src/Engine.h
+++ b/services/audiopolicy/engineconfigurable/src/Engine.h
@@ -16,10 +16,11 @@
 
 #pragma once
 
-#include "EngineBase.h"
-#include <EngineInterface.h>
-#include <AudioPolicyPluginInterface.h>
 #include "Collection.h"
+#include "EngineBase.h"
+#include <AudioPolicyPluginInterface.h>
+#include <CapEngineConfig.h>
+#include <EngineInterface.h>
 
 namespace android {
 class AudioPolicyManagerObserver;
@@ -107,8 +108,14 @@
     {
         return EngineBase::getProductStrategyByName(name);
     }
+    std::string getProductStrategyName(product_strategy_t id) const override {
+        return EngineBase::getProductStrategyName(id);
+    }
 
 private:
+    template<typename T>
+    status_t loadWithFallback(const T& configSource);
+
     android::status_t disableDevicesForStrategy(product_strategy_t strategy,
             const DeviceVector &devicesToDisable);
     void enableDevicesForStrategy(product_strategy_t strategy, const DeviceVector &devicesToEnable);
@@ -140,8 +147,6 @@
     template <typename Property, typename Key>
     bool setPropertyForKey(const Property &property, const Key &key);
 
-    status_t loadAudioPolicyEngineConfig(const std::string& xmlFilePath);
-
     DeviceVector getCachedDevices(product_strategy_t ps) const;
 
     ///
diff --git a/services/audiopolicy/engineconfigurable/tools/Android.bp b/services/audiopolicy/engineconfigurable/tools/Android.bp
index d1fb2fb..7ae124c 100644
--- a/services/audiopolicy/engineconfigurable/tools/Android.bp
+++ b/services/audiopolicy/engineconfigurable/tools/Android.bp
@@ -23,7 +23,7 @@
 }
 
 //##################################################################################################
-// Tools for audio policy engine criterion type configuration file
+// Legacy tools for audio policy engine criterion type configuration file
 //
 python_binary_host {
     name: "buildPolicyCriterionTypes",
@@ -57,6 +57,40 @@
 }
 
 //##################################################################################################
+// Tools for audio policy engine criterion type configuration file
+//
+python_binary_host {
+    name: "capBuildPolicyCriterionTypes",
+    main: "capBuildPolicyCriterionTypes.py",
+    srcs: [
+        "capBuildPolicyCriterionTypes.py",
+    ],
+}
+
+genrule_defaults {
+    name: "capbuildpolicycriteriontypesrule",
+    tools: ["capBuildPolicyCriterionTypes"],
+    cmd: "cp $(locations :audio_policy_configuration_files) $(genDir)/. && " +
+        "cp $(location :audio_policy_configuration_top_file) $(genDir)/audio_policy_configuration.xml && " +
+        "$(location capBuildPolicyCriterionTypes) " +
+        " --androidaudiobaseheader $(location :libaudio_system_audio_base) " +
+        " --androidaudiocommonbaseheader $(location :libaudio_system_audio_common_base) " +
+        "--audiopolicyconfigurationfile $(genDir)/audio_policy_configuration.xml " +
+        "--criteriontypes $(location :audio_policy_engine_aidl_criterion_types_template) " +
+        "--outputfile $(out)",
+    srcs: [
+        // The commented inputs must be provided to use this genrule_defaults
+        // @todo uncomment if 1428659 is merged: ":android_audio_base_header_file",
+        ":audio_policy_engine_aidl_criterion_types_template",
+        ":libaudio_system_audio_base",
+        ":libaudio_system_audio_common_base",
+        // ":audio_policy_configuration_top_file",
+        // ":audio_policy_configuration_files",
+    ],
+    out: ["audio_policy_engine_criterion_types.xml"],
+}
+
+//##################################################################################################
 // Tools for audio policy engine parameter framework configurable domains
 //
 python_binary_host {
@@ -84,6 +118,7 @@
     cmd: "mkdir -p $(genDir)/Structure/Policy && " +
         "cp $(locations :audio_policy_pfw_structure_files) $(genDir)/Structure/Policy && " +
         "cp $(location :audio_policy_pfw_toplevel) $(genDir)/top_level && " +
+        "sed -i -e 's|TuningAllowed=\"false\"|TuningAllowed=\"true\" ServerPort=\"unix:///dev/socket/audioserver/policy_debug\"|g' $(genDir)/top_level &&" +
         "$(location domainGeneratorPolicy) " +
         "--validate " +
         "--domain-generator-tool $(location domainGeneratorConnector) " +
@@ -106,7 +141,7 @@
 }
 
 //##################################################################################################
-// Tools for policy parameter-framework product strategies structure file generation
+// Legacy tools for policy parameter-framework product strategies structure file generation
 //
 python_binary_host {
     name: "buildStrategiesStructureFile",
@@ -154,5 +189,4 @@
         ":common_types_structure_template",
         ":libaudio_system_audio_base",
     ],
-    out: ["PolicySubsystem-CommonTypes.xml"],
 }
diff --git a/services/audiopolicy/engineconfigurable/tools/buildStrategiesStructureFile.py b/services/audiopolicy/engineconfigurable/tools/buildStrategiesStructureFile.py
index f69d346..6eabc57 100755
--- a/services/audiopolicy/engineconfigurable/tools/buildStrategiesStructureFile.py
+++ b/services/audiopolicy/engineconfigurable/tools/buildStrategiesStructureFile.py
@@ -75,7 +75,7 @@
     strategy_components = strategies_root.find('ComponentType')
 
     for strategy_name in strategies:
-        context_mapping = "".join(map(str, ["Name:", strategy_name]))
+        context_mapping = "".join(map(str, ["Identifier:-1,Name:", strategy_name]))
         strategy_pfw_name = strategy_name.replace('STRATEGY_', '').lower()
         ET.SubElement(strategy_components, "Component",
                       Name=strategy_pfw_name, Type="ProductStrategy",
diff --git a/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py b/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
new file mode 100755
index 0000000..b873830
--- /dev/null
+++ b/services/audiopolicy/engineconfigurable/tools/capBuildPolicyCriterionTypes.py
@@ -0,0 +1,368 @@
+#!/usr/bin/python3
+
+#
+# Copyright 2018, The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+import re
+import sys
+import os
+import logging
+import xml.etree.ElementTree as ET
+import xml.etree.ElementInclude as EI
+import xml.dom.minidom as MINIDOM
+from collections import OrderedDict
+
+#
+# Helper script that helps to feed at build time the XML criterion types file used by
+# the engineconfigurable to start the parameter-framework.
+# It prevents to fill them manually and avoid divergences with android.
+#
+# The Device Types criterion types are fed from audio-base.h file with the option
+#           --androidaudiobaseheader <path/to/android/audio/base/file/audio-base.h>
+#
+# The Device Addresses criterion types are fed from the audio policy configuration file
+# in order to discover all the devices for which the address matters.
+#           --audiopolicyconfigurationfile <path/to/audio_policy_configuration.xml>
+#
+# The reference file of criterion types must also be set as an input of the script:
+#           --criteriontypes <path/to/criterion/file/audio_criterion_types.xml.in>
+#
+# Finally, the output file of the script must also be set:
+#           --outputfile <path/to/out/vendor/etc/audio_criterion_types.xml>
+#
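+# Illustrative invocation (a sketch only; the file names below are hypothetical, the real inputs
+# are wired up by the capbuildpolicycriteriontypesrule genrule defaults in tools/Android.bp):
+#
+#   capBuildPolicyCriterionTypes.py \
+#       --androidaudiobaseheader audio-base.h \
+#       --androidaudiocommonbaseheader audio-common-base.h \
+#       --audiopolicyconfigurationfile audio_policy_configuration.xml \
+#       --criteriontypes audio_policy_engine_criterion_types.xml.in \
+#       --outputfile audio_policy_engine_criterion_types.xml
+#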
+
+def parseArgs():
+    argparser = argparse.ArgumentParser(description="Parameter-Framework XML \
+                                        audio criterion type file generator.\n\
+                                        Exits with the number of errors (recoverable \
+                                        or not) that occurred.")
+    argparser.add_argument('--androidaudiobaseheader',
+                           help="Android Audio Base C header file, Mandatory.",
+                           metavar="ANDROID_AUDIO_BASE_HEADER",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--androidaudiocommonbaseheader',
+                           help="Android Audio CommonBase C header file, Mandatory.",
+                           metavar="ANDROID_AUDIO_COMMON_BASE_HEADER",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--audiopolicyconfigurationfile',
+                           help="Android Audio Policy Configuration file, Mandatory.",
+                           metavar="(AUDIO_POLICY_CONFIGURATION_FILE)",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--criteriontypes',
+                           help="Criterion types XML base file, in \
+                           '<criterion_types> \
+                               <criterion_type name=\"\" type=<inclusive|exclusive> \
+                               values=<value1,value2,...>/>' \
+                           format. Mandatory.",
+                           metavar="CRITERION_TYPE_FILE",
+                           type=argparse.FileType('r'),
+                           required=True)
+    argparser.add_argument('--outputfile',
+                           help="Criterion types outputfile file. Mandatory.",
+                           metavar="CRITERION_TYPE_OUTPUT_FILE",
+                           type=argparse.FileType('w'),
+                           required=True)
+    argparser.add_argument('--verbose',
+                           action='store_true')
+
+    return argparser.parse_args()
+
+
+output_devices_type_value = {}
+input_devices_type_value = {}
+
+def generateXmlCriterionTypesFile(criterionTypes, addressCriteria, criterionTypesFile, outputFile):
+
+    logging.info("Importing criterionTypesFile {}".format(criterionTypesFile))
+    criterion_types_in_tree = ET.parse(criterionTypesFile)
+
+    criterion_types_root = criterion_types_in_tree.getroot()
+
+    for criterion_name, values_dict in criterionTypes.items():
+        for criterion_type in criterion_types_root.findall('criterion_type'):
+            if criterion_type.get('name') == criterion_name:
+                values_node = ET.SubElement(criterion_type, "values")
+                ordered_values = OrderedDict(sorted(values_dict.items(), key=lambda x: x[1]))
+                for key, value in ordered_values.items():
+                    value_node = ET.SubElement(values_node, "value")
+                    value_node.set('numerical', str(value))
+                    value_node.set('literal', key)
+
+                    if criterion_type.get('name') == "OutputDevicesMaskType":
+                        value_node.set('android_type', output_devices_type_value[key])
+                    if criterion_type.get('name') == "InputDevicesMaskType":
+                        value_node.set('android_type', input_devices_type_value[key])
+
+    if addressCriteria:
+        for criterion_name, values_list in addressCriteria.items():
+            for criterion_type in criterion_types_root.findall('criterion_type'):
+                if criterion_type.get('name') == criterion_name:
+                    index = 0
+                    existing_values_node = criterion_type.find("values")
+                    if existing_values_node is not None:
+                        for existing_value in existing_values_node.findall('value'):
+                            if existing_value.get('numerical') == str(1 << index):
+                                index += 1
+                        values_node = existing_values_node
+                    else:
+                        values_node = ET.SubElement(criterion_type, "values")
+
+                    for value in values_list:
+                        value_node = ET.SubElement(values_node, "value", literal=value)
+                        value_node.set('numerical', str(1 << index))
+                        index += 1
+
+    xmlstr = ET.tostring(criterion_types_root, encoding='utf8', method='xml')
+    reparsed = MINIDOM.parseString(xmlstr)
+    prettyXmlStr = reparsed.toprettyxml(newl='\r\n')
+    prettyXmlStr = os.linesep.join([s for s in prettyXmlStr.splitlines() if s.strip()])
+    outputFile.write(prettyXmlStr)
+
+def capitalizeLine(line):
+    return ' '.join((w.capitalize() for w in line.split(' ')))
+
+
+#
+# Parse the audio policy configuration file and output a dictionary of device criteria addresses
+#
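+# Illustrative shape of the returned dictionary (the addresses below are hypothetical examples;
+# real values come from the address attributes of the devicePort elements):
+#   {'OutputDevicesAddressesType': ['bus0_media_out', 'bus1_navigation_out'],
+#    'InputDevicesAddressesType': ['bottom_mic']}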
+def parseAndroidAudioPolicyConfigurationFile(audiopolicyconfigurationfile):
+
+    logging.info("Checking Audio Policy Configuration file {}".format(audiopolicyconfigurationfile))
+    #
+    # extract all device addresses from the audio policy configuration file
+    #
+    address_criteria_mapping_table = {
+        'sink' : "OutputDevicesAddressesType",
+        'source' : "InputDevicesAddressesType"}
+
+    address_criteria = {
+        'OutputDevicesAddressesType' : [],
+        'InputDevicesAddressesType' : []}
+
+    old_working_dir = os.getcwd()
+    print("Current working directory %s" % old_working_dir)
+
+    new_dir = os.path.join(old_working_dir, audiopolicyconfigurationfile.name)
+
+    policy_in_tree = ET.parse(audiopolicyconfigurationfile)
+    os.chdir(os.path.dirname(os.path.normpath(new_dir)))
+
+    print("new working directory %s" % os.getcwd())
+
+    policy_root = policy_in_tree.getroot()
+    EI.include(policy_root)
+
+    os.chdir(old_working_dir)
+
+    for device in policy_root.iter('devicePort'):
+        for key in address_criteria_mapping_table.keys():
+            if device.get('role') == key and device.get('address'):
+                logging.info("{}: <{}>".format(key, device.get('address')))
+                address_criteria[address_criteria_mapping_table[key]].append(device.get('address'))
+
+    for criteria in address_criteria:
+        values = ','.join(address_criteria[criteria])
+        logging.info("{}: <{}>".format(criteria, values))
+
+    return address_criteria
+
+#
+# Parse the audio-base.h file and output a dictionary of android dependent criterion types:
+#   -Android Mode
+#   -Output devices type
+#   -Input devices type
+#
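+# Illustrative shape of the returned dictionary (the literals and values below are hypothetical
+# examples; real entries are extracted from the header files):
+#   {'AndroidModeType': {'NORMAL': 0, 'RINGTONE': 1},
+#    'OutputDevicesMaskType': {'EARPIECE': 1, 'SPEAKER': 2},
+#    'InputDevicesMaskType': {'BUILTIN_MIC': 4}}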
+def parseAndroidAudioFile(androidaudiobaseheaderFile, androidaudiocommonbaseheaderFile):
+    #
+    # Adaptation table between Android Enumeration prefix and Audio PFW Criterion type names
+    #
+    criterion_mapping_table = {
+        'HAL_AUDIO_MODE' : "AndroidModeType",
+        'AUDIO_DEVICE_OUT' : "OutputDevicesMaskType",
+        'AUDIO_DEVICE_IN' : "InputDevicesMaskType"}
+
+    all_criteria = {
+        'AndroidModeType' : {},
+        'OutputDevicesMaskType' : {},
+        'InputDevicesMaskType' : {}}
+
+    #
+    # _CNT, _MAX, _ALL and _NONE are prohibited values as they are just helpers for enum users.
+    #
+    ignored_values = ['CNT', 'MAX', 'ALL', 'NONE']
+
+    multi_bit_outputdevice_shift = 32
+    multi_bit_inputdevice_shift = 32
+
+    criteria_pattern = re.compile(
+        r"\s*V\((?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*,\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])?[0-9a-fA-F]+|[0-9]+)")
+
+    logging.info("Checking Android Header file {}".format(androidaudiobaseheaderFile))
+
+    for line_number, line in enumerate(androidaudiobaseheaderFile):
+        match = criteria_pattern.match(line)
+        if match:
+            logging.debug("The following line is VALID: {}:{}\n{}".format(
+                androidaudiobaseheaderFile.name, line_number, line))
+
+            criterion_name = criterion_mapping_table[match.groupdict()['type']]
+            criterion_literal = ''.join(match.groupdict()['literal'])
+            criterion_numerical_value = match.groupdict()['values']
+
+            if criterion_name == "InputDevicesMaskType":
+                # Remove ambient and in_communication since they were deprecated
+                logging.info("Remove deprecated device {}".format(criterion_literal))
+                if criterion_literal == "AMBIENT" or criterion_literal == "COMMUNICATION":
+                    logging.info("Remove deprecated device {}".format(criterion_literal))
+                    continue
+                # for AUDIO_DEVICE_IN: rename default to stub
+                elif criterion_literal == "DEFAULT":
+                    criterion_numerical_value = str(int("0x40000000", 0))
+                    input_devices_type_value[criterion_literal] = "0xC0000000"
+                else:
+                    try:
+                        string_int = int(criterion_numerical_value, 0)
+                        # Append AUDIO_DEVICE_IN for android type tag
+                        input_devices_type_value[criterion_literal] = hex(string_int | 2147483648)
+
+                        num_bits = bin(string_int).count("1")
+                        if num_bits > 1:
+                            logging.info("The value {}:{} is for criterion {} binary rep {} has {} bits sets"
+                                .format(criterion_numerical_value, criterion_literal, criterion_name, bin(string_int), num_bits))
+                            string_int = 2**multi_bit_inputdevice_shift
+                            logging.info("new val assigned is {} {}" .format(string_int, bin(string_int)))
+                            multi_bit_inputdevice_shift += 1
+                            criterion_numerical_value = str(string_int)
+
+                    except ValueError:
+                        # Handle the exception
+                        logging.info("value {}:{} for criterion {} is not a number, ignoring"
+                            .format(criterion_numerical_value, criterion_literal, criterion_name))
+                        continue
+
+            if criterion_name == "OutputDevicesMaskType":
+                if criterion_literal == "DEFAULT":
+                    criterion_numerical_value = str(int("0x40000000", 0))
+                    output_devices_type_value[criterion_literal] = "0x40000000"
+                else:
+                    try:
+                        string_int = int(criterion_numerical_value, 0)
+                        output_devices_type_value[criterion_literal] = criterion_numerical_value
+
+                        num_bits = bin(string_int).count("1")
+                        if num_bits > 1:
+                            logging.info("The value {}:{} is for criterion {} binary rep {} has {} bits sets"
+                                .format(criterion_numerical_value, criterion_literal, criterion_name, bin(string_int), num_bits))
+                            string_int = 2**multi_bit_outputdevice_shift
+                            logging.info("new val assigned is {} {}" .format(string_int, bin(string_int)))
+                            multi_bit_outputdevice_shift += 1
+                            criterion_numerical_value = str(string_int)
+
+                    except ValueError:
+                        # Handle the exception
+                        logging.info("The value {}:{} is for criterion {} is not a number, ignoring"
+                            .format(criterion_numerical_value, criterion_literal, criterion_name))
+                        continue
+
+            try:
+                string_int = int(criterion_numerical_value, 0)
+
+            except ValueError:
+                # Handle the exception
+                logging.info("The value {}:{} is for criterion {} is not a number, ignoring"
+                    .format(criterion_numerical_value, criterion_literal, criterion_name))
+                continue
+
+            # Remove duplicated numerical values
+            if int(criterion_numerical_value, 0) in all_criteria[criterion_name].values():
+                logging.info("criterion {} duplicated values:".format(criterion_name))
+                logging.info("{}:{}".format(criterion_numerical_value, criterion_literal))
+                logging.info("KEEPING LATEST")
+                for key in list(all_criteria[criterion_name]):
+                    if all_criteria[criterion_name][key] == int(criterion_numerical_value, 0):
+                        del all_criteria[criterion_name][key]
+
+            all_criteria[criterion_name][criterion_literal] = int(criterion_numerical_value, 0)
+
+            logging.debug("type:{},".format(criterion_name))
+            logging.debug("iteral:{},".format(criterion_literal))
+            logging.debug("values:{}.".format(criterion_numerical_value))
+
+    logging.info("Checking Android Common Header file {}".format(androidaudiocommonbaseheaderFile))
+
+    criteria_pattern = re.compile(
+        r"\s*(?P<type>(?:"+'|'.join(criterion_mapping_table.keys()) + "))_" \
+        r"(?P<literal>(?!" + '|'.join(ignored_values) + ")\w*)\s*=\s*" \
+        r"(?:AUDIO_DEVICE_BIT_IN \| )?(?P<values>(?:0[xX])?[0-9a-fA-F]+|[0-9]+)")
+
+    for line_number, line in enumerate(androidaudiocommonbaseheaderFile):
+        match = criteria_pattern.match(line)
+        if match:
+            logging.debug("The following line is VALID: {}:{}\n{}".format(
+                androidaudiocommonbaseheaderFile.name, line_number, line))
+
+            criterion_name = criterion_mapping_table[match.groupdict()['type']]
+            criterion_literal = ''.join(match.groupdict()['literal'])
+            criterion_numerical_value = match.groupdict()['values']
+
+            try:
+                string_int = int(criterion_numerical_value, 0)
+            except ValueError:
+                # Handle the exception
+                logging.info("The value {}:{} is for criterion {} is not a number, ignoring"
+                    .format(criterion_numerical_value, criterion_literal, criterion_name))
+                continue
+
+            # Remove duplicated numerical values
+            if int(criterion_numerical_value, 0) in all_criteria[criterion_name].values():
+                logging.info("criterion {} duplicated values:".format(criterion_name))
+                logging.info("{}:{}".format(criterion_numerical_value, criterion_literal))
+                logging.info("KEEPING LATEST")
+                for key in list(all_criteria[criterion_name]):
+                    if all_criteria[criterion_name][key] == int(criterion_numerical_value, 0):
+                        del all_criteria[criterion_name][key]
+
+            all_criteria[criterion_name][criterion_literal] = int(criterion_numerical_value, 0)
+
+            logging.debug("type:{},".format(criterion_name))
+            logging.debug("iteral:{},".format(criterion_literal))
+            logging.debug("values:{}.".format(criterion_numerical_value))
+
+    return all_criteria
+
+
+def main():
+    logging.root.setLevel(logging.INFO)
+    args = parseArgs()
+
+    all_criteria = parseAndroidAudioFile(args.androidaudiobaseheader,
+                                         args.androidaudiocommonbaseheader)
+
+    address_criteria = parseAndroidAudioPolicyConfigurationFile(args.audiopolicyconfigurationfile)
+
+    criterion_types = args.criteriontypes
+
+    generateXmlCriterionTypesFile(all_criteria, address_criteria, criterion_types, args.outputfile)
+
+# If this file is directly executed
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/services/audiopolicy/engineconfigurable/wrapper/Android.bp b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
index 78d5fa3..506b19b 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/Android.bp
+++ b/services/audiopolicy/engineconfigurable/wrapper/Android.bp
@@ -19,10 +19,12 @@
     ],
     header_libs: [
         "libaudiofoundation_headers",
+        "libaudiopolicycapengine_config_headers",
         "libaudiopolicycommon",
         "libbase_headers",
     ],
     shared_libs: [
+        "libaudiopolicyengine_config",
         "liblog",
         "libmedia_helper",
         "libparameter",
diff --git a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
index 099d55d..0bcde8d 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
+++ b/services/audiopolicy/engineconfigurable/wrapper/ParameterManagerWrapper.cpp
@@ -18,13 +18,13 @@
 //#define LOG_NDEBUG 0
 
 #include "ParameterManagerWrapper.h"
+#include <ParameterMgrFullConnector.h>
 #include <ParameterMgrPlatformConnector.h>
 #include <SelectionCriterionTypeInterface.h>
 #include <SelectionCriterionInterface.h>
 #include <media/convert.h>
 #include <algorithm>
 #include <cutils/bitops.h>
-#include <cutils/config_utils.h>
 #include <cutils/misc.h>
 #include <fstream>
 #include <limits>
@@ -64,31 +64,10 @@
 namespace audio_policy {
 
 const char *const ParameterManagerWrapper::mPolicyPfwDefaultConfFileName =
-    "/etc/parameter-framework/ParameterFrameworkConfigurationPolicy.xml";
+    "/etc/parameter-framework/ParameterFrameworkConfigurationCap.xml";
 const char *const ParameterManagerWrapper::mPolicyPfwVendorConfFileName =
     "/vendor/etc/parameter-framework/ParameterFrameworkConfigurationPolicy.xml";
 
-static const char *const gInputDeviceCriterionName = "AvailableInputDevices";
-static const char *const gOutputDeviceCriterionName = "AvailableOutputDevices";
-static const char *const gPhoneStateCriterionName = "TelephonyMode";
-static const char *const gOutputDeviceAddressCriterionName = "AvailableOutputDevicesAddresses";
-static const char *const gInputDeviceAddressCriterionName = "AvailableInputDevicesAddresses";
-
-/**
- * Order MUST be align with defintiion of audio_policy_force_use_t within audio_policy.h
- */
-static const char *const gForceUseCriterionTag[AUDIO_POLICY_FORCE_USE_CNT] =
-{
-    [AUDIO_POLICY_FORCE_FOR_COMMUNICATION] =        "ForceUseForCommunication",
-    [AUDIO_POLICY_FORCE_FOR_MEDIA] =                "ForceUseForMedia",
-    [AUDIO_POLICY_FORCE_FOR_RECORD] =               "ForceUseForRecord",
-    [AUDIO_POLICY_FORCE_FOR_DOCK] =                 "ForceUseForDock",
-    [AUDIO_POLICY_FORCE_FOR_SYSTEM] =               "ForceUseForSystem",
-    [AUDIO_POLICY_FORCE_FOR_HDMI_SYSTEM_AUDIO] =    "ForceUseForHdmiSystemAudio",
-    [AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND] =     "ForceUseForEncodedSurround",
-    [AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING] =      "ForceUseForVibrateRinging"
-};
-
 template <>
 struct ParameterManagerWrapper::parameterManagerElementSupported<ISelectionCriterionInterface> {};
 template <>
@@ -100,9 +79,9 @@
 {
     // Connector
     if (access(mPolicyPfwVendorConfFileName, R_OK) == 0) {
-        mPfwConnector = new CParameterMgrPlatformConnector(mPolicyPfwVendorConfFileName);
+        mPfwConnector = new CParameterMgrFullConnector(mPolicyPfwVendorConfFileName);
     } else {
-        mPfwConnector = new CParameterMgrPlatformConnector(mPolicyPfwDefaultConfFileName);
+        mPfwConnector = new CParameterMgrFullConnector(mPolicyPfwDefaultConfFileName);
     }
 
     // Logger
@@ -130,13 +109,13 @@
               std::get<2>(pair).c_str(), name.c_str());
         criterionType->addValuePair(std::get<0>(pair), std::get<2>(pair), error);
 
-        if (name == gOutputDeviceCriterionName) {
+        if (name == capEngineConfig::gOutputDeviceCriterionName) {
             ALOGV("%s: Adding mOutputDeviceToCriterionTypeMap %d %" PRIu64" for criterionType %s",
                   __func__, std::get<1>(pair), std::get<0>(pair), name.c_str());
             audio_devices_t androidType = static_cast<audio_devices_t>(std::get<1>(pair));
             mOutputDeviceToCriterionTypeMap[androidType] = std::get<0>(pair);
         }
-        if (name == gInputDeviceCriterionName) {
+        if (name == capEngineConfig::gInputDeviceCriterionName) {
             ALOGV("%s: Adding mInputDeviceToCriterionTypeMap %d %" PRIu64" for criterionType %s",
                   __func__, std::get<1>(pair), std::get<0>(pair), name.c_str());
             audio_devices_t androidType = static_cast<audio_devices_t>(std::get<1>(pair));
@@ -207,10 +186,11 @@
 
 status_t ParameterManagerWrapper::setPhoneState(audio_mode_t mode)
 {
-    ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gPhoneStateCriterionName, mPolicyCriteria);
+    ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gPhoneStateCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gPhoneStateCriterionName);
+        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+              capEngineConfig::gPhoneStateCriterionName);
         return BAD_VALUE;
     }
     if (!isValueValidForCriterion(criterion, static_cast<int>(mode))) {
@@ -223,10 +203,11 @@
 
 audio_mode_t ParameterManagerWrapper::getPhoneState() const
 {
-    const ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gPhoneStateCriterionName, mPolicyCriteria);
+    const ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gPhoneStateCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gPhoneStateCriterionName);
+        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+              capEngineConfig::gPhoneStateCriterionName);
         return AUDIO_MODE_NORMAL;
     }
     return static_cast<audio_mode_t>(criterion->getCriterionState());
@@ -240,10 +221,11 @@
         return BAD_VALUE;
     }
 
-    ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gForceUseCriterionTag[usage], mPolicyCriteria);
+    ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gForceUseCriterionTag[usage], mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gForceUseCriterionTag[usage]);
+        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+              capEngineConfig::gForceUseCriterionTag[usage]);
         return BAD_VALUE;
     }
     if (!isValueValidForCriterion(criterion, static_cast<int>(config))) {
@@ -260,10 +242,11 @@
     if (usage > AUDIO_POLICY_FORCE_USE_CNT) {
         return AUDIO_POLICY_FORCE_NONE;
     }
-    const ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gForceUseCriterionTag[usage], mPolicyCriteria);
+    const ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gForceUseCriterionTag[usage], mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __FUNCTION__, gForceUseCriterionTag[usage]);
+        ALOGE("%s: no criterion found for %s", __FUNCTION__,
+              capEngineConfig::gForceUseCriterionTag[usage]);
         return AUDIO_POLICY_FORCE_NONE;
     }
     return static_cast<audio_policy_forced_cfg_t>(criterion->getCriterionState());
@@ -281,8 +264,8 @@
         audio_devices_t type, const std::string &address, audio_policy_dev_state_t state)
 {
     std::string criterionName = audio_is_output_device(type) ?
-                gOutputDeviceAddressCriterionName : gInputDeviceAddressCriterionName;
-
+            capEngineConfig::gOutputDeviceAddressCriterionName :
+            capEngineConfig::gInputDeviceAddressCriterionName;
     ALOGV("%s: device with address %s %s", __FUNCTION__, address.c_str(),
           state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE? "disconnected" : "connected");
     ISelectionCriterionInterface *criterion =
@@ -311,12 +294,12 @@
     return NO_ERROR;
 }
 
-status_t ParameterManagerWrapper::setAvailableInputDevices(const DeviceTypeSet &types)
-{
-    ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gInputDeviceCriterionName, mPolicyCriteria);
+status_t ParameterManagerWrapper::setAvailableInputDevices(const DeviceTypeSet &types) {
+    ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gInputDeviceCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __func__, gInputDeviceCriterionName);
+        ALOGE("%s: no criterion found for %s", __func__,
+              capEngineConfig::gInputDeviceCriterionName);
         return DEAD_OBJECT;
     }
     criterion->setCriterionState(convertDeviceTypesToCriterionValue(types));
@@ -324,12 +307,12 @@
     return NO_ERROR;
 }
 
-status_t ParameterManagerWrapper::setAvailableOutputDevices(const DeviceTypeSet &types)
-{
-    ISelectionCriterionInterface *criterion =
-            getElement<ISelectionCriterionInterface>(gOutputDeviceCriterionName, mPolicyCriteria);
+status_t ParameterManagerWrapper::setAvailableOutputDevices(const DeviceTypeSet &types) {
+    ISelectionCriterionInterface *criterion = getElement<ISelectionCriterionInterface>(
+            capEngineConfig::gOutputDeviceCriterionName, mPolicyCriteria);
     if (criterion == NULL) {
-        ALOGE("%s: no criterion found for %s", __func__, gOutputDeviceCriterionName);
+        ALOGE("%s: no criterion found for %s", __func__,
+              capEngineConfig::gOutputDeviceCriterionName);
         return DEAD_OBJECT;
     }
     criterion->setCriterionState(convertDeviceTypesToCriterionValue(types));
@@ -344,21 +327,15 @@
 
 uint64_t ParameterManagerWrapper::convertDeviceTypeToCriterionValue(audio_devices_t type) const {
     bool isOut = audio_is_output_devices(type);
-    uint32_t typeMask = isOut ? type : (type & ~AUDIO_DEVICE_BIT_IN);
-
     const auto &adapters = isOut ? mOutputDeviceToCriterionTypeMap : mInputDeviceToCriterionTypeMap;
-    // Only multibit devices need adaptation.
-    if (popcount(typeMask) > 1) {
-        const auto &adapter = adapters.find(type);
-        if (adapter != adapters.end()) {
-            ALOGV("%s: multibit device %d converted to criterion %" PRIu64, __func__, type,
-                  adapter->second);
-            return adapter->second;
-        }
-        ALOGE("%s: failed to find map for multibit device %d", __func__, type);
-        return 0;
+    const auto &adapter = adapters.find(type);
+    if (adapter != adapters.end()) {
+        ALOGV("%s: multibit device %d converted to criterion %" PRIu64, __func__, type,
+              adapter->second);
+        return adapter->second;
     }
-    return typeMask;
+    ALOGE("%s: failed to find map for multibit device %d", __func__, type);
+    return 0;
 }
 
 uint64_t ParameterManagerWrapper::convertDeviceTypesToCriterionValue(
@@ -382,5 +359,88 @@
     return deviceTypes;
 }
 
+void ParameterManagerWrapper::createDomain(const std::string &domain)
+{
+    std::string error;
+    bool ret = mPfwConnector->createDomain(domain, error);
+    if (!ret) {
+        ALOGD("%s: failed to create domain %s (error=%s)", __func__, domain.c_str(),
+              error.c_str());
+    }
+}
+
+void ParameterManagerWrapper::addConfigurableElementToDomain(const std::string &domain,
+        const std::string &elementPath)
+{
+    std::string error;
+    bool ret = mPfwConnector->addConfigurableElementToDomain(domain, elementPath, error);
+    ALOGE_IF(!ret, "%s: failed to add parameter %s for domain %s (error=%s)",
+              __func__, elementPath.c_str(), domain.c_str(), error.c_str());
+}
+
+void ParameterManagerWrapper::createConfiguration(const std::string &domain,
+        const std::string &configurationName)
+{
+    std::string error;
+    bool ret = mPfwConnector->createConfiguration(domain, configurationName, error);
+    ALOGE_IF(!ret, "%s: failed to create configuration %s for domain %s (error=%s)",
+              __func__, configurationName.c_str(), domain.c_str(), error.c_str());
+}
+
+void ParameterManagerWrapper::setApplicationRule(
+        const std::string &domain, const std::string &configurationName, const std::string &rule)
+{
+    std::string error;
+    bool ret = mPfwConnector->setApplicationRule(domain, configurationName, rule, error);
+    ALOGE_IF(!ret, "%s: failed to set rule %s for domain %s and configuration %s (error=%s)",
+              __func__, rule.c_str(), domain.c_str(), configurationName.c_str(), error.c_str());
+}
+
+void ParameterManagerWrapper::accessConfigurationValue(const std::string &domain,
+        const std::string &configurationName, const std::string &elementPath,
+        std::string &value)
+{
+    std::string error;
+    bool ret = mPfwConnector->accessConfigurationValue(domain, configurationName, elementPath,
+            value, /*set=*/ true, error);
+    ALOGE_IF(!ret, "%s: failed to set value %s for parameter %s on domain %s and configuration %s "
+          "(error=%s)", __func__, value.c_str(), elementPath.c_str(),  domain.c_str(),
+          configurationName.c_str(), error.c_str());
+}
+
+status_t ParameterManagerWrapper::setConfiguration(
+        const android::capEngineConfig::ParsingResult& capSettings)
+{
+    if (!isStarted()) {
+        return NO_INIT;
+    }
+    std::string error;
+    if (!mPfwConnector->setTuningMode(/* bOn= */ true, error)) {
+        ALOGD("%s: failed to set Tuning Mode error=%s", __FUNCTION__, error.c_str());
+        return DEAD_OBJECT;
+    }
+    for (auto &domain: capSettings.parsedConfig->capConfigurableDomains) {
+        createDomain(domain.name);
+        for (const auto &configurableElementValue : domain.settings[0].configurableElementValues) {
+            addConfigurableElementToDomain(domain.name,
+                    configurableElementValue.configurableElement.path);
+        }
+        for (const auto &configuration : domain.configurations) {
+            createConfiguration(domain.name, configuration.name);
+            setApplicationRule(domain.name, configuration.name, configuration.rule);
+        }
+        for (const auto &setting : domain.settings) {
+            for (const auto &configurableElementValue : setting.configurableElementValues) {
+                std::string value = configurableElementValue.value;
+                accessConfigurationValue(domain.name, setting.configurationName,
+                        configurableElementValue.configurableElement.path, value);
+            }
+
+        }
+    }
+    mPfwConnector->setTuningMode(/* bOn= */ false, error);
+    return OK;
+}
+
 } // namespace audio_policy
 } // namespace android
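
The new createDomain() / addConfigurableElementToDomain() / createConfiguration() / setApplicationRule() / accessConfigurationValue() helpers above are thin wrappers around the parameter-framework tuning API, and setConfiguration() drives them in a fixed order for each parsed CAP domain. The standalone sketch below models only that ordering; FakeConnector, the config structs and the example domain/rule/path strings are illustrative stand-ins, not the real CParameterMgrFullConnector or android::capEngineConfig types.

    // Simplified model of the tuning sequence performed by setConfiguration().
    #include <iostream>
    #include <string>
    #include <vector>

    struct ElementValue { std::string path; std::string value; };
    struct Setting { std::string configurationName; std::vector<ElementValue> values; };
    struct Configuration { std::string name; std::string rule; };
    struct Domain {
        std::string name;
        std::vector<Configuration> configurations;
        std::vector<Setting> settings;
    };

    // Stand-in for CParameterMgrFullConnector: every call just logs and succeeds.
    struct FakeConnector {
        bool setTuningMode(bool on, std::string&) { std::cout << "tuning=" << on << "\n"; return true; }
        bool createDomain(const std::string& d, std::string&) { std::cout << "domain " << d << "\n"; return true; }
        bool addConfigurableElementToDomain(const std::string& d, const std::string& p, std::string&) {
            std::cout << "  element " << p << " -> " << d << "\n"; return true; }
        bool createConfiguration(const std::string& d, const std::string& c, std::string&) {
            std::cout << "  config " << c << " in " << d << "\n"; return true; }
        bool setApplicationRule(const std::string&, const std::string& c, const std::string& r, std::string&) {
            std::cout << "    rule for " << c << ": " << r << "\n"; return true; }
        bool accessConfigurationValue(const std::string&, const std::string& c, const std::string& p,
                                      std::string& v, bool /*set*/, std::string&) {
            std::cout << "    " << c << ": " << p << " = " << v << "\n"; return true; }
    };

    int main() {
        // Hypothetical domain; real names, rules and paths come from the parsed CAP configuration.
        Domain d{"DeviceForStrategyMedia",
                 {{"Selected", "AvailableOutputDevices Includes Speaker"}},
                 {{"Selected", {{"/Policy/policy/strategies/media/selected_output", "Speaker"}}}}};
        FakeConnector pfw;
        std::string err;
        pfw.setTuningMode(true, err);                         // enter tuning mode
        pfw.createDomain(d.name, err);                        // createDomain()
        for (auto& ev : d.settings[0].values)                 // addConfigurableElementToDomain()
            pfw.addConfigurableElementToDomain(d.name, ev.path, err);
        for (auto& c : d.configurations) {                    // createConfiguration() + setApplicationRule()
            pfw.createConfiguration(d.name, c.name, err);
            pfw.setApplicationRule(d.name, c.name, c.rule, err);
        }
        for (auto& s : d.settings)                            // accessConfigurationValue(set=true)
            for (auto& ev : s.values) {
                std::string v = ev.value;
                pfw.accessConfigurationValue(d.name, s.configurationName, ev.path, v, true, err);
            }
        pfw.setTuningMode(false, err);                        // leave tuning mode
    }
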
diff --git a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
index fa4ae1e..0c45a60 100644
--- a/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
+++ b/services/audiopolicy/engineconfigurable/wrapper/include/ParameterManagerWrapper.h
@@ -16,6 +16,7 @@
 
 #pragma once
 
+#include <CapEngineConfig.h>
 #include <media/AudioContainers.h>
 #include <system/audio.h>
 #include <system/audio_policy.h>
@@ -27,7 +28,7 @@
 #include <string>
 #include <vector>
 
-class CParameterMgrPlatformConnector;
+class CParameterMgrFullConnector;
 class ISelectionCriterionInterface;
 struct cnode;
 
@@ -59,6 +60,9 @@
      */
     status_t start(std::string &error);
 
+    status_t setConfiguration(const android::capEngineConfig::ParsingResult& capSettings);
+
+
     /**
      * The following API wrap policy action to criteria
      */
@@ -148,6 +152,14 @@
             uint64_t criterionValue, bool isOut) const;
 
 private:
+    void createDomain(const std::string &domain);
+    void addConfigurableElementToDomain(const std::string &domain, const std::string &elementPath);
+    void createConfiguration(const std::string &domain, const std::string &configurationName);
+    void setApplicationRule(const std::string &domain, const std::string &configurationName,
+            const std::string &rule);
+    void accessConfigurationValue(const std::string &domain, const std::string &configurationName,
+                                  const std::string &elementPath, std::string &value);
+
     /**
      * Apply the configuration of the platform on the policy parameter manager.
      * Once all the criteria have been set, the client of the platform state must call
@@ -206,7 +218,7 @@
 
     Criteria mPolicyCriteria; /**< Policy Criterion Map. */
 
-    CParameterMgrPlatformConnector *mPfwConnector; /**< Policy Parameter Manager connector. */
+    CParameterMgrFullConnector *mPfwConnector; /**< Policy Parameter Manager connector. */
     ParameterMgrPlatformConnectorLogger *mPfwConnectorLogger; /**< Policy PFW logger. */
 
 
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 7810d63..aec8c16 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -30,6 +30,7 @@
         "libaudiopolicyengine_config",
     ],
     shared_libs: [
+        "com.android.media.audioserver-aconfig-cc",
         "libaudio_aidl_conversion_common_cpp",
         "libaudiofoundation",
         "libaudiopolicy",
@@ -41,4 +42,7 @@
         "libutils",
         "libxml2",
     ],
+    defaults: [
+        "aconfig_lib_cc_static_link.defaults",
+    ],
 }
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 13cc165..7de6939 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -30,6 +30,7 @@
 #include <PolicyAudioPort.h>
 #include <IOProfile.h>
 #include <AudioIODescriptorInterface.h>
+#include <com_android_media_audioserver.h>
 #include <policy.h>
 #include <media/AudioContainers.h>
 #include <utils/String8.h>
@@ -50,7 +51,7 @@
         { "STRATEGY_TRANSMITTED_THROUGH_SPEAKER", STRATEGY_TRANSMITTED_THROUGH_SPEAKER },
         { "STRATEGY_ACCESSIBILITY", STRATEGY_ACCESSIBILITY },
         { "STRATEGY_REROUTING", STRATEGY_REROUTING },
-        { "STRATEGY_PATCH", STRATEGY_REROUTING }, // boiler to manage stream patch volume
+        { "STRATEGY_PATCH", STRATEGY_PATCH }, // boiler to manage stream patch volume
         { "STRATEGY_CALL_ASSISTANT", STRATEGY_CALL_ASSISTANT },
     };
     return legacyStrategy;
@@ -142,7 +143,8 @@
         }
         break;
     case AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING:
-        if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_NONE) {
+        if (config != AUDIO_POLICY_FORCE_BT_SCO && config != AUDIO_POLICY_FORCE_BT_BLE
+                && config != AUDIO_POLICY_FORCE_NONE) {
             ALOGW("setForceUse() invalid config %d for VIBRATE_RINGING", config);
             return BAD_VALUE;
         }
@@ -154,12 +156,58 @@
     return EngineBase::setForceUse(usage, config);
 }
 
+bool Engine::isBtScoActive(DeviceVector& availableOutputDevices,
+                           const SwAudioOutputCollection &outputs) const {
+    if (availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
+        return false;
+    }
+    // SCO is active if:
+    // 1) we are in a call and SCO is the preferred device for PHONE strategy
+    if (isInCall() && audio_is_bluetooth_out_sco_device(
+            getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE))) {
+        return true;
+    }
+
+    // 2) A strategy for which the preferred device is SCO is active
+    for (const auto &ps : getOrderedProductStrategies()) {
+        if (outputs.isStrategyActive(ps) &&
+            !getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps)
+                .getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
+            return true;
+        }
+    }
+    // 3) a ringtone is active and SCO is used for ringing
+    if (outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_RING))
+          && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
+                    == AUDIO_POLICY_FORCE_BT_SCO)) {
+        return true;
+    }
+    // 4) an active input is routed from SCO
+    DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
+    const auto &inputs = getApmObserver()->getInputs();
+    if (inputs.activeInputsCountOnDevices(availableInputDevices.getDevicesFromType(
+            AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) > 0) {
+        return true;
+    }
+    return false;
+}
+
 void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
                                             DeviceVector& availableOutputDevices,
                                             const SwAudioOutputCollection &outputs) const
 {
     DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
 
+    if (com::android::media::audioserver::use_bt_sco_for_media()) {
+        // remove A2DP and LE Audio devices whenever BT SCO is in use
+        if (isBtScoActive(availableOutputDevices, outputs)) {
+            availableOutputDevices.remove(
+                availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllA2dpSet()));
+            availableOutputDevices.remove(
+                availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllBleSet()));
+        }
+    }
+
     switch (strategy) {
     case STRATEGY_SONIFICATION_RESPECTFUL: {
         if (!(isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL)))) {
@@ -355,6 +403,40 @@
                 }
             }
         }
+
+        // if LEA headset is connected and we are told to use it, play ringtone over
+        // speaker and BT LEA
+        if (!availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllBleSet()).isEmpty()) {
+            DeviceVector devices2;
+            devices2 = availableOutputDevices.getFirstDevicesFromTypes({
+                    AUDIO_DEVICE_OUT_BLE_HEADSET, AUDIO_DEVICE_OUT_BLE_SPEAKER});
+            // Use ONLY Bluetooth LEA output when ringing in vibration mode
+            if (!((getForceUse(AUDIO_POLICY_FORCE_FOR_SYSTEM) == AUDIO_POLICY_FORCE_SYSTEM_ENFORCED)
+                    && (strategy == STRATEGY_ENFORCED_AUDIBLE))) {
+                if (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
+                        == AUDIO_POLICY_FORCE_BT_BLE) {
+                    if (!devices2.isEmpty()) {
+                        devices = devices2;
+                        break;
+                    }
+                }
+            }
+            // Use both Bluetooth LEA and phone default output when ringing in normal mode
+            if (audio_is_ble_out_device(getPreferredDeviceTypeForLegacyStrategy(
+                    availableOutputDevices, STRATEGY_PHONE))) {
+                if (strategy == STRATEGY_SONIFICATION) {
+                    devices.replaceDevicesByType(
+                            AUDIO_DEVICE_OUT_SPEAKER,
+                            availableOutputDevices.getDevicesFromType(
+                                    AUDIO_DEVICE_OUT_SPEAKER_SAFE));
+                }
+                if (!devices2.isEmpty()) {
+                    devices.add(devices2);
+                    break;
+                }
+            }
+        }
+
         // The second device used for sonification is the same as the device used by media strategy
         FALLTHROUGH_INTENDED;
 
@@ -381,10 +463,13 @@
 
         // LE audio broadcast device is only used if:
         // - No call is active
-        // - either MEDIA or SONIFICATION_RESPECTFUL is the highest priority active strategy
-        //   OR the LE audio unicast device is not active
+        // - the highest priority active strategy is not PHONE or TRANSMITTED_THROUGH_SPEAKER
+        // OR the LE audio unicast device is not active
         if (devices2.isEmpty() && !isInCall()
-                && (strategy == STRATEGY_MEDIA || strategy == STRATEGY_SONIFICATION_RESPECTFUL)) {
+                // also skipping routing queries from PHONE and TRANSMITTED_THROUGH_SPEAKER here
+                // so this code is not dependent on breaks for other strategies above
+                && (strategy != STRATEGY_PHONE)
+                && (strategy != STRATEGY_TRANSMITTED_THROUGH_SPEAKER)) {
             legacy_strategy topActiveStrategy = STRATEGY_NONE;
             for (const auto &ps : getOrderedProductStrategies()) {
                 if (outputs.isStrategyActive(ps)) {
@@ -394,8 +479,8 @@
                 }
             }
 
-            if (topActiveStrategy == STRATEGY_NONE || topActiveStrategy == STRATEGY_MEDIA
-                    || topActiveStrategy == STRATEGY_SONIFICATION_RESPECTFUL
+            if ((topActiveStrategy != STRATEGY_PHONE
+                        && topActiveStrategy != STRATEGY_TRANSMITTED_THROUGH_SPEAKER)
                     || !outputs.isAnyDeviceTypeActive(getAudioDeviceOutLeAudioUnicastSet())) {
                 devices2 =
                         availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_BLE_BROADCAST);
@@ -418,15 +503,27 @@
                         getLastRemovableMediaDevices(GROUP_WIRED, excludedDevices));
             }
         }
+
+        if (com::android::media::audioserver::use_bt_sco_for_media()) {
+            if (devices2.isEmpty() && isBtScoActive(availableOutputDevices, outputs)) {
+                devices2 = availableOutputDevices.getFirstDevicesFromTypes(
+                        { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
+                          AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+                          AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+            }
+        }
+
         if ((devices2.isEmpty()) &&
                 (getForceUse(AUDIO_POLICY_FORCE_FOR_DOCK) == AUDIO_POLICY_FORCE_ANALOG_DOCK)) {
             devices2 = availableOutputDevices.getDevicesFromType(
                     AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET);
         }
+
         if (devices2.isEmpty()) {
             devices2 = availableOutputDevices.getFirstDevicesFromTypes({
                         AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_SPEAKER});
         }
+
         DeviceVector devices3;
         if (strategy == STRATEGY_MEDIA) {
             // ARC, SPDIF and AUX_LINE can co-exist with others.
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 878bca9..862b5fd 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -28,20 +28,6 @@
 namespace audio_policy
 {
 
-enum legacy_strategy {
-    STRATEGY_NONE = -1,
-    STRATEGY_MEDIA,
-    STRATEGY_PHONE,
-    STRATEGY_SONIFICATION,
-    STRATEGY_SONIFICATION_RESPECTFUL,
-    STRATEGY_DTMF,
-    STRATEGY_ENFORCED_AUDIBLE,
-    STRATEGY_TRANSMITTED_THROUGH_SPEAKER,
-    STRATEGY_ACCESSIBILITY,
-    STRATEGY_REROUTING,
-    STRATEGY_CALL_ASSISTANT,
-};
-
 class Engine : public EngineBase
 {
 public:
@@ -109,6 +95,9 @@
     DeviceVector getDisabledDevicesForInputSource(
             const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
 
+    bool isBtScoActive(DeviceVector& availableOutputDevices,
+                       const SwAudioOutputCollection &outputs) const;
+
     std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
 };
 } // namespace audio_policy
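
The use_bt_sco_for_media() handling added to Engine::filterOutputDevicesForStrategy() above removes A2DP and LE Audio candidates whenever Engine::isBtScoActive() reports SCO in use, and the media fallback later picks a SCO sink when nothing else matched. The sketch below models that filtering with plain std::set; the device names and the single-condition isBtScoActive() stub are placeholders for the real DeviceVector / SwAudioOutputCollection checks (in-call routing, active strategies preferring SCO, SCO ringtone, active SCO input).

    #include <iostream>
    #include <set>
    #include <string>

    // Placeholder device categories; the real code uses audio_devices_t sets from
    // getAudioDeviceOutAllScoSet() / AllA2dpSet() / AllBleSet().
    static const std::set<std::string> kScoDevices  = {"SCO_HEADSET", "SCO_CARKIT"};
    static const std::set<std::string> kA2dpDevices = {"A2DP_HEADPHONES", "A2DP_SPEAKER"};
    static const std::set<std::string> kBleDevices  = {"BLE_HEADSET", "BLE_SPEAKER"};

    // Stand-in for Engine::isBtScoActive(): here only "in a call routed to SCO" is modeled.
    static bool isBtScoActive(const std::set<std::string>& available, bool inCallOnSco) {
        if (!inCallOnSco) return false;
        for (const auto& d : kScoDevices)
            if (available.count(d)) return true;   // some SCO device must be available
        return false;
    }

    static void filterForMedia(std::set<std::string>& available, bool useBtScoForMedia,
                               bool inCallOnSco) {
        if (!useBtScoForMedia || !isBtScoActive(available, inCallOnSco)) return;
        for (const auto& d : kA2dpDevices) available.erase(d);  // drop A2DP while SCO is in use
        for (const auto& d : kBleDevices)  available.erase(d);  // drop LE audio as well
    }

    int main() {
        std::set<std::string> available = {"SPEAKER", "SCO_HEADSET", "A2DP_HEADPHONES", "BLE_HEADSET"};
        filterForMedia(available, /*useBtScoForMedia=*/true, /*inCallOnSco=*/true);
        for (const auto& d : available) std::cout << d << "\n";  // SPEAKER and SCO_HEADSET remain
    }
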
diff --git a/services/audiopolicy/fuzzer/Android.bp b/services/audiopolicy/fuzzer/Android.bp
index 8cee613..30d4403 100644
--- a/services/audiopolicy/fuzzer/Android.bp
+++ b/services/audiopolicy/fuzzer/Android.bp
@@ -28,39 +28,18 @@
 
 cc_fuzz {
     name: "audiopolicy_fuzzer",
+    defaults: [
+        "libaudiopolicyservice_dependencies",
+    ],
     srcs: [
         "audiopolicy_fuzzer.cpp",
     ],
-    include_dirs: [
-        "frameworks/av/services/audiopolicy",
-    ],
-    shared_libs: [
-        "android.hardware.audio.common-util",
-        "capture_state_listener-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libaudioclient",
-        "libaudiofoundation",
-        "libaudiopolicy",
-        "libaudiopolicycomponents",
-        "libaudiopolicymanagerdefault",
-        "libbase",
-        "libbinder",
-        "libcutils",
-        "libdl",
-        "libhidlbase",
-        "liblog",
-        "libmedia_helper",
-        "libmediametrics",
-        "libutils",
-        "libxml2",
-    ],
     static_libs: [
         "android.hardware.audio.common@7.0-enums",
     ],
-    header_libs: [
-        "libaudiopolicycommon",
-        "libaudiopolicyengine_interface_headers",
-        "libaudiopolicymanager_interface_headers",
+    include_dirs: [
+        "frameworks/av/services/audiopolicy", // include path outside of libaudiopolicyservice
+        "frameworks/av/services/audiopolicy/engine/interface", // for /tests/AudioPolicyTestManager.h:
     ],
     data: [":audiopolicyfuzzer_configuration_files"],
     fuzz_config: {
diff --git a/services/audiopolicy/fuzzer/aidl/Android.bp b/services/audiopolicy/fuzzer/aidl/Android.bp
index 2c85955..680f76d 100644
--- a/services/audiopolicy/fuzzer/aidl/Android.bp
+++ b/services/audiopolicy/fuzzer/aidl/Android.bp
@@ -23,36 +23,15 @@
 cc_defaults {
     name: "audiopolicy_aidl_fuzzer_defaults",
     shared_libs: [
-        "audiopolicy-aidl-cpp",
-        "audiopolicy-types-aidl-cpp",
-        "framework-permission-aidl-cpp",
-        "libactivitymanager_aidl",
-        "libaudioclient",
         "libaudioflinger",
-        "libaudiohal",
-        "libaudiopolicy",
-        "libaudiopolicymanagerdefault",
         "libaudiopolicyservice",
-        "libaudioprocessing",
-        "libhidlbase",
-        "liblog",
-        "libmediautils",
-        "libnbaio",
-        "libnblog",
-        "libpowermanager",
-        "libvibrator",
-        "packagemanager_aidl-cpp",
+        "libmediaplayerservice",
     ],
     static_libs: [
         "libaudiomockhal",
         "libfakeservicemanager",
-        "libmediaplayerservice",
     ],
     header_libs: [
-        "libaudioflinger_headers",
-        "libaudiohal_headers",
-        "libaudiopolicymanager_interface_headers",
-        "libbinder_headers",
         "libmedia_headers",
     ],
     fuzz_config: {
@@ -77,6 +56,8 @@
         "latest_android_hardware_audio_core_ndk_shared",
         "latest_android_hardware_audio_core_sounddose_ndk_shared",
         "latest_android_hardware_audio_effect_ndk_shared",
+        "libaudioflinger_dependencies",
+        "libaudiopolicyservice_dependencies",
         "service_fuzzer_defaults",
     ],
 }
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..4d539b7
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..8af7d2f
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..b89b77e
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..6e966e9
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..8ccf24d
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..223d1df
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..ad54b83
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7 b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..f4eabf4
--- /dev/null
+++ b/services/audiopolicy/fuzzer/aidl/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
index 6416a47..fd40c04 100644
--- a/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
+++ b/services/audiopolicy/fuzzer/audiopolicy_fuzzer.cpp
@@ -265,6 +265,7 @@
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
     bool isBitPerfect;
+    float volume;
 
     // TODO b/182392769: use attribution source util
     AttributionSourceState attributionSource;
@@ -272,7 +273,7 @@
     attributionSource.token = sp<BBinder>::make();
     if (mManager->getOutputForAttr(&attr, output, AUDIO_SESSION_NONE, &stream, attributionSource,
             &config, &flags, selectedDeviceId, portId, {}, &outputType, &isSpatialized,
-            &isBitPerfect) != OK) {
+            &isBitPerfect, &volume) != OK) {
         return false;
     }
     if (*output == AUDIO_IO_HANDLE_NONE || *portId == AUDIO_PORT_HANDLE_NONE) {
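
The float *volume out-parameter added to getOutputForAttr() (exercised by the fuzzer call above and filled in by AudioPolicyManager below) reports a linear amplitude derived from the current volume in dB via Volume::DbToAmpl(). A minimal sketch of that conversion, assuming the usual 20*log10 dB/amplitude convention rather than the real Volume helper:

    #include <cmath>
    #include <cstdio>

    // Simplified stand-in for Volume::DbToAmpl(): convert a gain in dB into a linear
    // amplitude factor (0 dB -> 1.0, -20 dB -> 0.1). The real helper additionally
    // clamps very low dB values to zero.
    static float dbToAmpl(float db) {
        return std::pow(10.0f, db / 20.0f);
    }

    int main() {
        for (float db : {0.0f, -6.0f, -20.0f, -60.0f}) {
            std::printf("%6.1f dB -> amplitude %.4f\n", db, dbToAmpl(db));
        }
        return 0;
    }
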
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 6e9bd34..3b7406a 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -45,7 +45,6 @@
 #include <android_media_audiopolicy.h>
 #include <com_android_media_audioserver.h>
 #include <cutils/bitops.h>
-#include <cutils/properties.h>
 #include <media/AudioParameter.h>
 #include <policy.h>
 #include <private/android_filesystem_config.h>
@@ -55,6 +54,7 @@
 #include <utils/Log.h>
 
 #include "AudioPolicyManager.h"
+#include "SpatializerHelper.h"
 #include "TypeConverter.h"
 
 namespace android {
@@ -66,6 +66,7 @@
 using android::media::audio::common::AudioDeviceAddress;
 using android::media::audio::common::AudioPortDeviceExt;
 using android::media::audio::common::AudioPortExt;
+using com::android::media::audioserver::fix_call_audio_patch;
 using content::AttributionSourceState;
 
 //FIXME: workaround for truncated touch sounds
@@ -122,17 +123,16 @@
     }
 }
 
-void AudioPolicyManager::broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
+status_t AudioPolicyManager::broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
                                                         media::DeviceConnectedState state)
 {
     audio_port_v7 devicePort;
     device->toAudioPort(&devicePort);
-    if (status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
-            status != OK) {
-        ALOGE("Error %d while setting connected state %d for device %s",
-                status, static_cast<int>(state),
-                device->getDeviceTypeAddr().toString(false).c_str());
-    }
+    status_t status = mpClientInterface->setDeviceConnectedState(&devicePort, state);
+    ALOGE_IF(status != OK, "Error %d while setting connected state %d for device %s", status,
+             static_cast<int>(state), device->getDeviceTypeAddr().toString(false).c_str());
+
+    return status;
 }
 
 status_t AudioPolicyManager::setDeviceConnectionStateInt(
@@ -213,7 +213,14 @@
 
             // Before checking outputs, broadcast connect event to allow HAL to retrieve dynamic
             // parameters on newly connected devices (instead of opening the outputs...)
-            broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
+            if (broadcastDeviceConnectionState(
+                        device, media::DeviceConnectedState::CONNECTED) != NO_ERROR) {
+                mAvailableOutputDevices.remove(device);
+                mHwModules.cleanUpForDevice(device);
+                ALOGE("%s() device %s format %x connection failed", __func__,
+                      device->toString().c_str(), device->getEncodedFormat());
+                return INVALID_OPERATION;
+            }
 
             if (checkOutputsForDevice(device, state, outputs) != NO_ERROR) {
                 mAvailableOutputDevices.remove(device);
@@ -338,7 +345,7 @@
                         && (!device_distinguishes_on_address(device->type())
                                 // always force when disconnecting (a non-duplicated device)
                                 || (state == AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE));
-                if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
+                if (desc->mPreferredAttrInfo != nullptr && newDevices != desc->devices()) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices
@@ -374,6 +381,7 @@
         checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, 0);
 
         mpClientInterface->onAudioPortListUpdate();
+        ALOGV("%s() completed for device: %s", __func__, device->toString().c_str());
         return NO_ERROR;
     }  // end if is output device
 
@@ -389,15 +397,28 @@
                 return INVALID_OPERATION;
             }
 
+            ALOGV("%s() connecting device %s", __func__, device->toString().c_str());
+
             if (mAvailableInputDevices.add(device) < 0) {
                 return NO_MEMORY;
             }
 
             // Before checking intputs, broadcast connect event to allow HAL to retrieve dynamic
             // parameters on newly connected devices (instead of opening the inputs...)
-            broadcastDeviceConnectionState(device, media::DeviceConnectedState::CONNECTED);
+            if (broadcastDeviceConnectionState(
+                        device, media::DeviceConnectedState::CONNECTED) != NO_ERROR) {
+                mAvailableInputDevices.remove(device);
+                mHwModules.cleanUpForDevice(device);
+                ALOGE("%s() device %s format %x connection failed", __func__,
+                      device->toString().c_str(), device->getEncodedFormat());
+                return INVALID_OPERATION;
+            }
+            // Propagate device availability to Engine
+            setEngineDeviceConnectionState(device, state);
 
             if (checkInputsForDevice(device, state) != NO_ERROR) {
+                setEngineDeviceConnectionState(device, AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE);
+
                 mAvailableInputDevices.remove(device);
 
                 broadcastDeviceConnectionState(device, media::DeviceConnectedState::DISCONNECTED);
@@ -431,6 +452,9 @@
 
             // remove device from mReportedFormatsMap cache
             mReportedFormatsMap.erase(device);
+
+            // Propagate device availability to Engine
+            setEngineDeviceConnectionState(device, state);
         } break;
 
         default:
@@ -438,9 +462,6 @@
             return BAD_VALUE;
         }
 
-        // Propagate device availability to Engine
-        setEngineDeviceConnectionState(device, state);
-
         checkCloseInputs();
         // As the input device list can impact the output device selection, update
         // getDeviceForStrategy() cache
@@ -457,6 +478,7 @@
         }
 
         mpClientInterface->onAudioPortListUpdate();
+        ALOGV("%s() completed for device: %s", __func__, device->toString().c_str());
         return NO_ERROR;
     } // end if is input device
 
@@ -563,15 +585,31 @@
         }
     }
     auto musicStrategy = streamToStrategy(AUDIO_STREAM_MUSIC);
+    uint32_t muteWaitMs = 0;
     for (size_t i = 0; i < mOutputs.size(); i++) {
        sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
-       // mute media strategies and delay device switch by the largest
-       // This avoid sending the music tail into the earpiece or headset.
+       // mute media strategies to avoid sending the music tail into
+       // the earpiece or headset.
+       if (desc->isStrategyActive(musicStrategy)) {
+           uint32_t tempRecommendedMuteDuration = desc->getRecommendedMuteDurationMs();
+           uint32_t tempMuteDurationMs = tempRecommendedMuteDuration > 0 ?
+                        tempRecommendedMuteDuration : desc->latency() * 4;
+           if (muteWaitMs < tempMuteDurationMs) {
+               muteWaitMs = tempMuteDurationMs;
+           }
+       }
        setStrategyMute(musicStrategy, true, desc);
        setStrategyMute(musicStrategy, false, desc, MUTE_TIME_MS,
           mEngine->getOutputDevicesForAttributes(attributes_initializer(AUDIO_USAGE_MEDIA),
                                               nullptr, true /*fromCache*/).types());
     }
+    // Wait for the muted audio to propagate down the audio path; see checkDeviceMuteStrategies().
+    // We assume that MUTE_TIME_MS is way larger than muteWaitMs so that unmuting still
+    // happens after the actual device switch.
+    if (muteWaitMs > 0) {
+        ALOGW_IF(MUTE_TIME_MS < muteWaitMs * 2, "%s excessive mute wait %d", __func__, muteWaitMs);
+        usleep(muteWaitMs * 1000);
+    }
     // Toggle the device state: UNAVAILABLE -> AVAILABLE
     // This will force reading again the device configuration
     status_t status = setDeviceConnectionState(device,
@@ -684,8 +722,10 @@
     audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
     auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
 
-    disconnectTelephonyAudioSource(mCallRxSourceClient);
-    disconnectTelephonyAudioSource(mCallTxSourceClient);
+    if (!fix_call_audio_patch()) {
+        disconnectTelephonyAudioSource(mCallRxSourceClient);
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
+    }
 
     if (rxDevices.isEmpty()) {
         ALOGW("%s() no selected output device", __func__);
@@ -738,13 +778,16 @@
     // Use legacy routing method for voice calls via setOutputDevice() on primary output.
     // Otherwise, create two audio patches for TX and RX path.
     if (!createRxPatch) {
+        if (fix_call_audio_patch()) {
+            disconnectTelephonyAudioSource(mCallRxSourceClient);
+        }
         if (!hasPrimaryOutput()) {
             ALOGW("%s() no primary output available", __func__);
             return INVALID_OPERATION;
         }
         muteWaitMs = setOutputDevices(__func__, mPrimaryOutput, rxDevices, true, delayMs);
     } else { // create RX path audio patch
-        connectTelephonyRxAudioSource();
+        connectTelephonyRxAudioSource(delayMs);
         // If the TX device is on the primary HW module but RX device is
         // on other HW module, SinkMetaData of telephony input should handle it
         // assuming the device uses audio HAL V5.0 and above
@@ -760,6 +803,8 @@
             }
         }
         connectTelephonyTxAudioSource(txSourceDevice, txSinkDevice, delayMs);
+    } else if (fix_call_audio_patch()) {
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
     }
     if (waitMs != nullptr) {
         *waitMs = muteWaitMs;
@@ -779,19 +824,40 @@
     return false;
 }
 
-void AudioPolicyManager::connectTelephonyRxAudioSource()
+void AudioPolicyManager::connectTelephonyRxAudioSource(uint32_t delayMs)
 {
-    disconnectTelephonyAudioSource(mCallRxSourceClient);
+    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
+    if (fix_call_audio_patch()) {
+        if (mCallRxSourceClient != nullptr) {
+            DeviceVector rxDevices =
+                  mEngine->getOutputDevicesForAttributes(aa, nullptr, false /*fromCache*/);
+            ALOG_ASSERT(!rxDevices.isEmpty() || !mCallRxSourceClient->isConnected(),
+                        "connectTelephonyRxAudioSource(): no device found for call RX source");
+            sp<DeviceDescriptor> rxDevice = rxDevices.itemAt(0);
+            if (mCallRxSourceClient->isConnected()
+                    && mCallRxSourceClient->sinkDevice()->equals(rxDevice)) {
+                return;
+            }
+            disconnectTelephonyAudioSource(mCallRxSourceClient);
+        }
+    } else {
+        disconnectTelephonyAudioSource(mCallRxSourceClient);
+    }
+
     const struct audio_port_config source = {
         .role = AUDIO_PORT_ROLE_SOURCE, .type = AUDIO_PORT_TYPE_DEVICE,
         .ext.device.type = AUDIO_DEVICE_IN_TELEPHONY_RX, .ext.device.address = ""
     };
-    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
-
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    status_t status = startAudioSource(&source, &aa, &portId, 0 /*uid*/, true /*internal*/);
+
+    status_t status = startAudioSourceInternal(&source, &aa, &portId, 0 /*uid*/,
+                                       true /*internal*/, true /*isCallRx*/, delayMs);
     ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
     mCallRxSourceClient = mAudioSources.valueFor(portId);
+    ALOGV("%s portdID %d between source %s and sink %s", __func__, portId,
+        mCallRxSourceClient->srcDevice()->toString().c_str(),
+        mCallRxSourceClient->sinkDevice()->toString().c_str());
     ALOGE_IF(mCallRxSourceClient == nullptr,
              "%s failed to start Telephony Rx AudioSource", __func__);
 }
@@ -810,15 +876,26 @@
         const sp<DeviceDescriptor> &srcDevice, const sp<DeviceDescriptor> &sinkDevice,
         uint32_t delayMs)
 {
-    disconnectTelephonyAudioSource(mCallTxSourceClient);
     if (srcDevice == nullptr || sinkDevice == nullptr) {
         ALOGW("%s could not create patch, invalid sink and/or source device(s)", __func__);
         return;
     }
+
+    if (fix_call_audio_patch()) {
+        if (mCallTxSourceClient != nullptr) {
+            if (mCallTxSourceClient->isConnected()
+                    && mCallTxSourceClient->srcDevice()->equals(srcDevice)) {
+                return;
+            }
+            disconnectTelephonyAudioSource(mCallTxSourceClient);
+        }
+    } else {
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
+    }
+
     PatchBuilder patchBuilder;
     patchBuilder.addSource(srcDevice).addSink(sinkDevice);
-    ALOGV("%s between source %s and sink %s", __func__,
-            srcDevice->toString().c_str(), sinkDevice->toString().c_str());
+
     auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
     const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
 
@@ -826,7 +903,8 @@
     srcDevice->toAudioPortConfig(&source);
     mCallTxSourceClient = new SourceClientDescriptor(
                 callTxSourceClientPortId, mUidCached, aa, source, srcDevice, AUDIO_STREAM_PATCH,
-                mCommunnicationStrategy, toVolumeSource(aa), true);
+                mCommunnicationStrategy, toVolumeSource(aa), true,
+                false /*isCallRx*/, true /*isCallTx*/);
     mCallTxSourceClient->setPreferredDeviceId(sinkDevice->getId());
 
     audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
@@ -834,6 +912,8 @@
                 mCallTxSourceClient, sinkDevice, patchBuilder.patch(), patchHandle, mUidCached,
                 delayMs);
     ALOGE_IF(status != NO_ERROR, "%s() error %d creating TX audio patch", __func__, status);
+    ALOGV("%s portdID %d between source %s and sink %s", __func__, callTxSourceClientPortId,
+        srcDevice->toString().c_str(), sinkDevice->toString().c_str());
     if (status == NO_ERROR) {
         mAudioSources.add(callTxSourceClientPortId, mCallTxSourceClient);
     }
@@ -917,15 +997,15 @@
     for (size_t i = 0; i < mOutputs.size(); i++) {
         sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
         DeviceVector newDevices = getNewOutputDevices(desc, true /*fromCache*/);
+        if (state != AUDIO_MODE_NORMAL && oldState == AUDIO_MODE_NORMAL
+                && desc->mPreferredAttrInfo != nullptr) {
+            // If the output is using preferred mixer attributes and the audio mode is not normal,
+            // the output needs to be reopened with the default configuration.
+            outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
+            continue;
+        }
         if (state != AUDIO_MODE_IN_CALL || (desc != mPrimaryOutput && !isTelephonyRxOrTx(desc))) {
             bool forceRouting = !newDevices.isEmpty();
-            if (desc->mUsePreferredMixerAttributes && newDevices != desc->devices()) {
-                // If the device is using preferred mixer attributes, the output need to reopen
-                // with default configuration when the new selected devices are different from
-                // current routing devices.
-                outputsToReopen.emplace(mOutputs.keyAt(i), newDevices);
-                continue;
-            }
             setOutputDevices(__func__, desc, newDevices, forceRouting, 0 /*delayMs*/, nullptr,
                              true /*requiresMuteCheck*/, !forceRouting /*requiresVolumeCheck*/);
         }
@@ -1127,8 +1207,7 @@
 
     SortedVector<audio_io_handle_t> outputs = getOutputsForDevices(devices, mOutputs);
     audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
-    if (stream == AUDIO_STREAM_MUSIC &&
-        property_get_bool("audio.deep_buffer.media", false /* default_value */)) {
+    if (stream == AUDIO_STREAM_MUSIC && mConfig->useDeepBufferForMedia()) {
         flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
     }
     const audio_io_handle_t output = selectOutput(outputs, flags);
@@ -1225,7 +1304,8 @@
 
     // FIXME: in case of RENDER policy, the output capabilities should be checked
     if ((secondaryMixes != nullptr && !secondaryMixes->empty())
-            && !audio_is_linear_pcm(config->format)) {
+            && (!audio_is_linear_pcm(config->format) ||
+                    *flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)) {
         ALOGD("%s: rejecting request as secondary mixes only support pcm", __func__);
         return BAD_VALUE;
     }
@@ -1250,7 +1330,7 @@
             status = openDirectOutput(
                     *stream, session, config,
                     (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_DIRECT),
-                    DeviceVector(policyMixDevice), &newOutput);
+                    DeviceVector(policyMixDevice), &newOutput, *resultAttr);
             if (status == NO_ERROR) {
                 policyDesc = mOutputs.valueFor(newOutput);
                 primaryMix->setOutput(policyDesc);
@@ -1337,23 +1417,50 @@
             // Only use preferred mixer if the uid matches or the preferred mixer is bit-perfect
             // and it is currently active.
             if (info != nullptr && info->getUid() != uid &&
-                ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) == AUDIO_OUTPUT_FLAG_NONE ||
-                        info->getActiveClientCount() == 0)) {
+                (!info->isBitPerfect() || info->getActiveClientCount() == 0)) {
                 info = nullptr;
             }
+
+            if (info != nullptr && info->isBitPerfect() &&
+                (*flags & (AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD |
+                        AUDIO_OUTPUT_FLAG_HW_AV_SYNC | AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)) != 0) {
+                // Reject direct request if a preferred mixer config in use is bit-perfect.
+                ALOGD("%s reject direct request as bit-perfect mixer attributes is active",
+                      __func__);
+                return BAD_VALUE;
+            }
+
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                if (info != nullptr && info->getUid() == uid &&
+                    info->configMatches(*config) &&
+                    (mEngine->getPhoneState() != AUDIO_MODE_NORMAL ||
+                            std::any_of(gHighPriorityUseCases.begin(), gHighPriorityUseCases.end(),
+                                        [this, &outputDevices](audio_usage_t usage) {
+                                            return mOutputs.isUsageActiveOnDevice(
+                                                    usage, outputDevices[0]); }))) {
+                    // Bit-perfect request is not allowed when the phone mode is not normal or
+                    // there is any higher priority user case active.
+                    return INVALID_OPERATION;
+                }
+            }
         }
         *output = getOutputForDevices(outputDevices, session, resultAttr, config,
                 flags, isSpatialized, info, resultAttr->flags & AUDIO_FLAG_MUTE_HAPTIC);
         // The client will be active if the client is currently preferred mixer owner and the
         // requested configuration matches the preferred mixer configuration.
         *isBitPerfect = (info != nullptr
-                && (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
+                && info->isBitPerfect()
                 && info->getUid() == uid
                 && *output != AUDIO_IO_HANDLE_NONE
                 // When bit-perfect output is selected for the preferred mixer attributes owner,
                 // only need to consider the config matches.
                 && mOutputs.valueFor(*output)->isConfigurationMatched(
                         clientConfig, AUDIO_OUTPUT_FLAG_NONE));
+
+        if (*isBitPerfect) {
+            *flags = (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+        }
     }
     if (*output == AUDIO_IO_HANDLE_NONE) {
         AudioProfileVector profiles;
@@ -1404,7 +1511,8 @@
                                               std::vector<audio_io_handle_t> *secondaryOutputs,
                                               output_type_t *outputType,
                                               bool *isSpatialized,
-                                              bool *isBitPerfect)
+                                              bool *isBitPerfect,
+                                              float *volume)
 {
     // The supplied portId must be AUDIO_PORT_HANDLE_NONE
     if (*portId != AUDIO_PORT_HANDLE_NONE) {
@@ -1460,6 +1568,8 @@
                                   outputDesc->mPolicyMix);
     outputDesc->addClient(clientDesc);
 
+    *volume = Volume::DbToAmpl(outputDesc->getCurVolume(toVolumeSource(resultAttr)));
+
     ALOGV("%s() returns output %d requestedPortId %d selectedDeviceId %d for port ID %d", __func__,
           *output, requestedPortId, *selectedDeviceId, *portId);
 
@@ -1471,7 +1581,8 @@
                                               const audio_config_t *config,
                                               audio_output_flags_t flags,
                                               const DeviceVector &devices,
-                                              audio_io_handle_t *output) {
+                                              audio_io_handle_t *output,
+                                              audio_attributes_t attributes) {
 
     *output = AUDIO_IO_HANDLE_NONE;
 
@@ -1483,6 +1594,13 @@
         return NAME_NOT_FOUND;
     }
 
+    // Reject flag combinations that do not make sense. Note that the requested flags might not
+    // have the 'DIRECT' flag set; however, once a direct-capable profile is found, it will
+    // combine the requested flags with its own flags, yielding an unsupported combination.
+    if ((flags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER) != 0) {
+        return NAME_NOT_FOUND;
+    }
+
     // Do not allow offloading if one non offloadable effect is enabled or MasterMono is enabled.
     // This prevents creating an offloaded track and tearing it down immediately after start
     // when audioflinger detects there is an active non offloadable effect.
@@ -1512,7 +1630,7 @@
                 (config->channel_mask == desc->getChannelMask()) &&
                 (session == desc->mDirectClientSession)) {
                 desc->mDirectOpenCount++;
-                ALOGV("%s reusing direct output %d for session %d", __func__,
+                ALOGI("%s reusing direct output %d for session %d", __func__,
                     mOutputs.keyAt(i), session);
                 *output = mOutputs.keyAt(i);
                 return NO_ERROR;
@@ -1522,17 +1640,23 @@
 
     if (!profile->canOpenNewIo()) {
         if (!com::android::media::audioserver::direct_track_reprioritization()) {
+            ALOGW("%s profile %s can't open new output maxOpenCount reached", __func__,
+                  profile->getName().c_str());
             return NAME_NOT_FOUND;
         } else if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0) {
             // MMAP gracefully handles lack of an exclusive track resource by mixing
             // above the audio framework. For AAudio to know that the limit is reached,
             // return an error.
+            ALOGW("%s profile %s can't open new mmap output maxOpenCount reached", __func__,
+                  profile->getName().c_str());
             return NAME_NOT_FOUND;
         } else {
             // Close outputs on this profile, if available, to free resources for this request
             for (int i = 0; i < mOutputs.size() && !profile->canOpenNewIo(); i++) {
                 const auto desc = mOutputs.valueAt(i);
                 if (desc->mProfile == profile) {
+                    ALOGV("%s closeOutput %d to prioritize session %d on profile %s", __func__,
+                          desc->mIoHandle, session, profile->getName().c_str());
                     closeOutput(desc->mIoHandle);
                 }
             }
@@ -1541,6 +1665,8 @@
 
     // Unable to close streams to find free resources for this request
     if (!profile->canOpenNewIo()) {
+        ALOGW("%s profile %s can't open new output maxOpenCount reached", __func__,
+              profile->getName().c_str());
         return NAME_NOT_FOUND;
     }
 
@@ -1551,13 +1677,19 @@
     releaseMsdOutputPatches(devices);
 
     status_t status =
-            outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, flags, output);
+            outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, &flags, output,
+                             attributes);
 
-    // only accept an output with the requested parameters
+    // only accept an output with the requested parameters, unless the format can be IEC61937
+    // encapsulated and opened by AudioFlinger as wrapped IEC61937.
+    const bool ignoreRequestedParametersCheck = audio_is_iec61937_compatible(config->format)
+            && (flags & AUDIO_OUTPUT_FLAG_IEC958_NONAUDIO)
+            && audio_has_proportional_frames(outputDesc->getFormat());
     if (status != NO_ERROR ||
-        (config->sample_rate != 0 && config->sample_rate != outputDesc->getSamplingRate()) ||
-        (config->format != AUDIO_FORMAT_DEFAULT && config->format != outputDesc->getFormat()) ||
-        (config->channel_mask != 0 && config->channel_mask != outputDesc->getChannelMask())) {
+        (!ignoreRequestedParametersCheck &&
+        ((config->sample_rate != 0 && config->sample_rate != outputDesc->getSamplingRate()) ||
+         (config->format != AUDIO_FORMAT_DEFAULT && config->format != outputDesc->getFormat()) ||
+         (config->channel_mask != 0 && config->channel_mask != outputDesc->getChannelMask())))) {
         ALOGV("%s failed opening direct output: output %d sample rate %d %d,"
                 "format %d %d, channel mask %04x %04x", __func__, *output, config->sample_rate,
                 outputDesc->getSamplingRate(), config->format, outputDesc->getFormat(),
@@ -1577,6 +1709,11 @@
     outputDesc->mDirectClientSession = session;
 
     addOutput(*output, outputDesc);
+    // The version check is essentially to avoid making this call in the case of the HIDL HAL.
+    if (auto hwModule = mHwModules.getModuleFromHandle(mPrimaryModuleHandle); hwModule &&
+            hwModule->getHalVersionMajor() >= 3) {
+        setOutputDevices(__func__, outputDesc, devices, true, 0, NULL);
+    }
     mPreviousOutputs = mOutputs;
     ALOGV("%s returns new direct output %d", __func__, *output);
     mpClientInterface->onAudioPortListUpdate();
@@ -1617,8 +1754,7 @@
     if (stream != AUDIO_STREAM_MUSIC) {
         *flags = (audio_output_flags_t)(*flags &~AUDIO_OUTPUT_FLAG_DEEP_BUFFER);
     } else if (/* stream == AUDIO_STREAM_MUSIC && */
-            *flags == AUDIO_OUTPUT_FLAG_NONE &&
-            property_get_bool("audio.deep_buffer.media", false /* default_value */)) {
+            *flags == AUDIO_OUTPUT_FLAG_NONE && mConfig->useDeepBufferForMedia()) {
         // use DEEP_BUFFER as default output for music stream type
         *flags = (audio_output_flags_t)AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
     }
@@ -1638,19 +1774,23 @@
     }
 
     // Use the spatializer output if the content can be spatialized, no preferred mixer
-    // was specified and offload or direct playback is not explicitly requested.
+    // was specified, offload or direct playback is not explicitly requested, and no haptic
+    // channel is included in the playback.
     *isSpatialized = false;
-    if (mSpatializerOutput != nullptr
-            && canBeSpatializedInt(attr, config, devices.toTypeAddrVector())
-            && prefMixerConfigInfo == nullptr
-            && ((*flags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0)) {
+    if (mSpatializerOutput != nullptr &&
+        canBeSpatializedInt(attr, config, devices.toTypeAddrVector()) &&
+        prefMixerConfigInfo == nullptr &&
+        ((*flags & (AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD | AUDIO_OUTPUT_FLAG_DIRECT)) == 0) &&
+        checkHapticCompatibilityOnSpatializerOutput(config, session)) {
         *isSpatialized = true;
         return mSpatializerOutput->mIoHandle;
     }
 
     audio_config_t directConfig = *config;
     directConfig.channel_mask = channelMask;
-    status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output);
+
+    status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output,
+                                       *attr);
     if (status != NAME_NOT_FOUND) {
         return output;
     }
@@ -1701,6 +1841,24 @@
             // at this stage we should ignore the DIRECT flag as no direct output could be
             // found earlier
             *flags = (audio_output_flags_t) (*flags & ~AUDIO_OUTPUT_FLAG_DIRECT);
+            if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+                // If the preferred mixer attributes info is null, do not select the bit-perfect
+                // output unless the bit-perfect output is the only candidate.
+                // The bit-perfect output can exist while the passed-in preferred mixer attributes
+                // info is null when the client is a high priority client. High priority clients
+                // are ringtone or alarm, which are not bit-perfect use cases.
+                size_t i = 0;
+                while (i < outputs.size() && outputs.size() > 1) {
+                    auto desc = mOutputs.valueFor(outputs[i]);
+                    // The output descriptor must not be null here.
+                    if (desc->isBitPerfect()) {
+                        outputs.removeItemsAt(i);
+                    } else {
+                        i += 1;
+                    }
+                }
+            }
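+            // For example (illustrative, not exhaustive): with candidates {bit-perfect,
+            // deep-buffer}, a ringtone client without preferred mixer attributes falls through
+            // to the deep-buffer output; with {bit-perfect} as the only candidate, it is kept
+            // so that selectOutput() below still has something to return.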
             output = selectOutput(
                     outputs, *flags, config->format, channelMask, config->sample_rate, session);
         }
@@ -2069,8 +2227,7 @@
     // matching criteria values in priority order for best matching output so far
     std::vector<uint32_t> bestMatchCriteria(8, 0);
 
-    const bool hasOrphanHaptic =
-            mEffects.hasOrphanEffectsForSessionAndType(sessionId, FX_IID_HAPTICGENERATOR);
+    const bool hasOrphanHaptic = mEffects.hasOrphansForSession(sessionId, FX_IID_HAPTICGENERATOR);
     const uint32_t channelCount = audio_channel_count_from_out_mask(channelMask);
     const uint32_t hapticChannelCount = audio_channel_count_from_out_mask(
         channelMask & AUDIO_CHANNEL_HAPTIC_ALL);
@@ -2103,7 +2260,7 @@
         // matching.
         if ((outputHapticChannelCount >= hapticChannelCount && format == outputDesc->getFormat() &&
              samplingRate == outputDesc->getSamplingRate()) ||
-            (hapticChannelCount == 0 && hasOrphanHaptic)) {
+            (outputHapticChannelCount != 0 && hasOrphanHaptic)) {
             currentMatchCriteria[0] = outputHapticChannelCount;
         }
 
@@ -2130,7 +2287,14 @@
 
         // sampling rate match
         if (samplingRate > SAMPLE_RATE_HZ_DEFAULT) {
-            currentMatchCriteria[4] = outputDesc->getSamplingRate();
+            int diff;  // avoid unsigned integer overflow.
+            __builtin_sub_overflow(outputDesc->getSamplingRate(), samplingRate, &diff);
+
+            // Prefer the closest output sampling rate greater than or equal to the target;
+            // if none exists, prefer the closest output sampling rate below the target.
+            //
+            // The criterion is offset to keep it non-negative.
+            currentMatchCriteria[4] = diff >= 0 ? -diff + 200'000'000 : diff + 100'000'000;
         }
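+        // A minimal sketch of the ordering the offset above produces, assuming a 48 kHz target
+        // and C++17 (the lambda and the rates below are illustrative only): exact or higher
+        // rates always outrank lower ones, and within each group the closest rate wins.
+        {
+            constexpr auto sampleRateCriterion = [](int outputRate, int targetRate) {
+                const int d = outputRate - targetRate;
+                return d >= 0 ? -d + 200'000'000 : d + 100'000'000;
+            };
+            static_assert(sampleRateCriterion(48000, 48000)    // exact match is best,
+                    > sampleRateCriterion(96000, 48000));      // then the closest higher rate
+            static_assert(sampleRateCriterion(96000, 48000)    // any higher rate beats
+                    > sampleRateCriterion(44100, 48000));      // the closest lower rate
+        }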
 
         // performance flags match
@@ -2177,6 +2341,20 @@
     ALOGV("startOutput() output %d, stream %d, session %d",
           outputDesc->mIoHandle, client->stream(), client->session());
 
+    if (com::android::media::audioserver::fix_concurrent_playback_behavior_with_bit_perfect_client()
+            && gHighPriorityUseCases.count(client->attributes().usage) != 0
+            && outputDesc->isBitPerfect()) {
+        // Usually, APM selects the bit-perfect output for high priority use cases only when
+        // the bit-perfect output is the only output that can be routed to the selected device.
+        // However, there is no need to play high priority use cases such as ringtone and alarm
+        // on the bit-perfect path. Reopen the output and return DEAD_OBJECT so that the client
+        // can attach to the new output.
+        ALOGD("%s: reopen bit-perfect output as high priority use case(%d) is starting",
+              __func__, client->stream());
+        reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        return DEAD_OBJECT;
+    }
+
     status_t status = outputDesc->start();
     if (status != NO_ERROR) {
         return status;
@@ -2195,7 +2373,6 @@
                 ALOGE("%s unable to open output with default config", __func__);
                 return status;
             }
-            desc->mUsePreferredMixerAttributes = true;
         }
         return status;
     }
@@ -2219,22 +2396,25 @@
                 if (desc == nullptr) {
                     return BAD_VALUE;
                 }
-                desc->mUsePreferredMixerAttributes = true;
+                desc->mPreferredAttrInfo = info;
                 // Intentionally return error to let the client side resending request for
                 // creating and starting.
                 return DEAD_OBJECT;
             }
             info->increaseActiveClient();
-            if (info->getActiveClientCount() == 1 &&
-                (info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            if (info->getActiveClientCount() == 1 && info->isBitPerfect()) {
                 // If it is first bit-perfect client, reroute all clients that will be routed to
                 // the bit-perfect sink so that it is guaranteed only bit-perfect stream is active.
                 PortHandleVector clientsToInvalidate;
+                std::vector<sp<SwAudioOutputDescriptor>> outputsToResetDevice;
                 for (size_t i = 0; i < mOutputs.size(); i++) {
-                    if (mOutputs[i] == outputDesc ||
-                        mOutputs[i]->devices().filter(outputDesc->devices()).isEmpty()) {
+                    if (mOutputs[i] == outputDesc || (!mOutputs[i]->devices().isEmpty() &&
+                        mOutputs[i]->devices().filter(outputDesc->devices()).isEmpty())) {
                         continue;
                     }
+                    if (mOutputs[i]->getPatchHandle() != AUDIO_PATCH_HANDLE_NONE) {
+                        outputsToResetDevice.push_back(mOutputs[i]);
+                    }
                     for (const auto& c : mOutputs[i]->getClientIterable()) {
                         clientsToInvalidate.push_back(c->portId());
                     }
@@ -2244,6 +2424,9 @@
                           __func__);
                     mpClientInterface->invalidateTracks(clientsToInvalidate);
                 }
+                for (const auto& output : outputsToResetDevice) {
+                    resetOutputDevice(output, 0 /*delayMs*/, nullptr /*patchHandle*/);
+                }
             }
         }
     }
@@ -2256,6 +2439,15 @@
         usleep(delayMs * 1000);
     }
 
+    if (status == NO_ERROR &&
+        outputDesc->mPreferredAttrInfo != nullptr &&
+        outputDesc->isBitPerfect() &&
+        com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // A new client has started on the bit-perfect output; update all clients' internal mute.
+        updateClientsInternalMute(outputDesc);
+    }
+
     return status;
 }
 
@@ -2360,6 +2552,11 @@
              followsSameRouting(clientAttr, attributes_initializer(AUDIO_USAGE_NOTIFICATION)) ||
              (beaconMuteLatency > 0));
         uint32_t waitMs = beaconMuteLatency;
+        const bool needToCloseBitPerfectOutput =
+                (com::android::media::audioserver::
+                        fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+                gHighPriorityUseCases.count(clientAttr.usage) != 0);
+        std::vector<sp<SwAudioOutputDescriptor>> outputsToReopen;
         for (size_t i = 0; i < mOutputs.size(); i++) {
             sp<SwAudioOutputDescriptor> desc = mOutputs.valueAt(i);
             if (desc != outputDesc) {
@@ -2396,15 +2593,22 @@
                 // Note restoring AudioTracks onto this output needs to invoke
                 // a volume ramp if there is no mute.
                 requiresMuteCheck |= sharedDevice && isActive;
+
+                if (needToCloseBitPerfectOutput && desc->isBitPerfect()) {
+                    outputsToReopen.push_back(desc);
+                }
             }
         }
 
-        if (outputDesc->mUsePreferredMixerAttributes && devices != outputDesc->devices()) {
+        if (outputDesc->mPreferredAttrInfo != nullptr && devices != outputDesc->devices()) {
             // If the output is open with preferred mixer attributes, but the routed device is
             // changed when calling this function, returning DEAD_OBJECT to indicate routing
             // changed.
             return DEAD_OBJECT;
         }
+        for (auto& outputToReopen : outputsToReopen) {
+            reopenOutput(outputToReopen, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+        }
         const uint32_t muteWaitMs =
                 setOutputDevices(__func__, outputDesc, devices, force, 0, nullptr,
                                  requiresMuteCheck);
@@ -2483,7 +2687,7 @@
                                     getAudioDeviceOutLeAudioUnicastSet()).isEmpty()))) {
                 DeviceVector newDevices = getNewOutputDevices(desc, false /*fromCache*/);
                 bool force = desc->devices() != newDevices;
-                if (desc->mUsePreferredMixerAttributes && force) {
+                if (desc->mPreferredAttrInfo != nullptr && force) {
                     // If the device is using preferred mixer attributes, the output need to reopen
                     // with default configuration when the new selected devices are different from
                     // current routing devices.
@@ -2531,12 +2735,21 @@
     if (outputDesc->devices().size() == 1) {
         sp<PreferredMixerAttributesInfo> info = getPreferredMixerAttributesInfo(
                 outputDesc->devices()[0]->getId(), client->strategy());
+        bool outputReopened = false;
         if (info != nullptr && info->getUid() == client->uid()) {
             info->decreaseActiveClient();
             if (info->getActiveClientCount() == 0) {
                 reopenOutput(outputDesc, nullptr /*config*/, AUDIO_OUTPUT_FLAG_NONE, __func__);
+                outputReopened = true;
             }
         }
+        if (com::android::media::audioserver::
+                    fix_concurrent_playback_behavior_with_bit_perfect_client() &&
+            !outputReopened && outputDesc->isBitPerfect()) {
+            // Only need to update the clients' internal mute when the output is bit-perfect and it
+            // is not reopened.
+            updateClientsInternalMute(outputDesc);
+        }
     }
     return status;
 }
@@ -2608,7 +2821,7 @@
                     DeviceVector newDevices2 = getNewOutputDevices(desc, false /*fromCache*/);
                     bool force = desc->devices() != newDevices2;
 
-                    if (desc->mUsePreferredMixerAttributes && force) {
+                    if (desc->mPreferredAttrInfo != nullptr && force) {
                         // If the device is using preferred mixer attributes, the output need to
                         // reopen with default configuration when the new selected devices are
                         // different from current routing devices.
@@ -2971,43 +3184,115 @@
         }
     }
 
+    bool isPreemptor = false;
     if (!profile->canOpenNewIo()) {
-        for (size_t i = 0; i < mInputs.size(); ) {
-            sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
-            if (desc->mProfile != profile) {
-                i++;
-                continue;
-            }
-            // if sound trigger, reuse input if used by other sound trigger on same session
-            // else
-            //    reuse input if active client app is not in IDLE state
-            //
-            RecordClientVector clients = desc->clientsList();
-            bool doClose = false;
-            for (const auto& client : clients) {
-                if (isSoundTrigger != client->isSoundTrigger()) {
+        if (com::android::media::audioserver::fix_input_sharing_logic()) {
+            //  First pick the best candidate for preemption (there may not be any):
+            //  - Preempt an input if:
+            //     - It has only strictly lower priority use cases than the new client, or
+            //     - It has use cases of equal priority to the new client and was not opened
+            //       as the result of a preemption, or has been active since it was opened.
+            //  - Rank the preemption candidates inactive first, then by lowest priority.
+            sp<AudioInputDescriptor> closeCandidate;
+            int leastCloseRank = INT_MAX;
+            static const int sCloseActive = 0x100;
+
+            for (size_t i = 0; i < mInputs.size(); i++) {
+                sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                if (desc->mProfile != profile) {
                     continue;
                 }
-                if (client->isSoundTrigger()) {
-                    if (session == client->session()) {
+                sp<RecordClientDescriptor> topPrioClient = desc->getHighestPriorityClient();
+                if (topPrioClient == nullptr) {
+                    continue;
+                }
+                int topPrio = source_priority(topPrioClient->source());
+                if (topPrio < source_priority(attributes.source)
+                      || (topPrio == source_priority(attributes.source)
+                          && !desc->isPreemptor())) {
+                    int closeRank = (desc->isActive() ? sCloseActive : 0) + topPrio;
+                    if (closeRank < leastCloseRank) {
+                        leastCloseRank = closeRank;
+                        closeCandidate = desc;
+                    }
+                }
+            }
+
+            if (closeCandidate != nullptr) {
+                closeInput(closeCandidate->mIoHandle);
+                // Mark the new input as being issued from a preemption
+                // so that it will not be preempted later
+                isPreemptor = true;
+            } else {
+                // Then pick the best reusable input (there is always one).
+                // The order of preference is:
+                // 1) active inputs with the same use case as the new client
+                // 2) inactive inputs with the same use case
+                // 3) active inputs with different use cases
+                // 4) inactive inputs with different use cases
+                sp<AudioInputDescriptor> reuseCandidate;
+                int leastReuseRank = INT_MAX;
+                static const int sReuseDifferentUseCase = 0x100;
+
+                for (size_t i = 0; i < mInputs.size(); i++) {
+                    sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                    if (desc->mProfile != profile) {
+                        continue;
+                    }
+                    int reuseRank = sReuseDifferentUseCase;
+                    for (const auto& client: desc->getClientIterable()) {
+                        if (client->source() == attributes.source) {
+                            reuseRank = 0;
+                            break;
+                        }
+                    }
+                    reuseRank += desc->isActive() ? 0 : 1;
+                    if (reuseRank < leastReuseRank) {
+                        leastReuseRank = reuseRank;
+                        reuseCandidate = desc;
+                    }
+                }
+                return reuseCandidate->mIoHandle;
+            }
+        } else { // fix_input_sharing_logic()
+            for (size_t i = 0; i < mInputs.size(); ) {
+                 sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                 if (desc->mProfile != profile) {
+                     i++;
+                     continue;
+                 }
+                // if sound trigger, reuse input if used by other sound trigger on same session
+                // else
+                //    reuse input if active client app is not in IDLE state
+                //
+                RecordClientVector clients = desc->clientsList();
+                bool doClose = false;
+                for (const auto& client : clients) {
+                    if (isSoundTrigger != client->isSoundTrigger()) {
+                        continue;
+                    }
+                    if (client->isSoundTrigger()) {
+                        if (session == client->session()) {
+                            return desc->mIoHandle;
+                        }
+                        continue;
+                    }
+                    if (client->active() && client->appState() != APP_STATE_IDLE) {
                         return desc->mIoHandle;
                     }
-                    continue;
+                    doClose = true;
                 }
-                if (client->active() && client->appState() != APP_STATE_IDLE) {
-                    return desc->mIoHandle;
+                if (doClose) {
+                    closeInput(desc->mIoHandle);
+                } else {
+                    i++;
                 }
-                doClose = true;
-            }
-            if (doClose) {
-                closeInput(desc->mIoHandle);
-            } else {
-                i++;
             }
         }
     }
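+    // A minimal sketch of the two rankings used above when fix_input_sharing_logic() is
+    // enabled (the helpers below are illustrative, assuming the 0x100 offsets dominate any
+    // source_priority() value); in both rankings the lowest rank wins.
+    {
+        // closeRank: inactive inputs are preempted before active ones; among equals, the one
+        // serving the lowest priority use case goes first.
+        constexpr auto closeRank = [](bool active, int topPriority) {
+            return (active ? 0x100 : 0) + topPriority;
+        };
+        static_assert(closeRank(false /*active*/, 5) < closeRank(true /*active*/, 1));
+        // reuseRank: matches the documented preference order 1) to 4).
+        constexpr auto reuseRank = [](bool sameUseCase, bool active) {
+            return (sameUseCase ? 0 : 0x100) + (active ? 0 : 1);
+        };
+        static_assert(reuseRank(true, true) < reuseRank(true, false));    // 1) before 2)
+        static_assert(reuseRank(true, false) < reuseRank(false, true));   // 2) before 3)
+        static_assert(reuseRank(false, true) < reuseRank(false, false));  // 3) before 4)
+    }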
 
-    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(profile, mpClientInterface);
+    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(
+            profile, mpClientInterface, isPreemptor);
 
     audio_config_t lConfig = AUDIO_CONFIG_INITIALIZER;
     lConfig.sample_rate = profileSamplingRate;
@@ -3230,6 +3515,23 @@
     releaseInput(portId);
 }
 
+bool AudioPolicyManager::checkCloseInput(const sp<AudioInputDescriptor>& input) {
+    if (input->clientsList().size() == 0
+            || !mAvailableInputDevices.containsAtLeastOne(input->supportedDevices())) {
+        return true;
+    }
+    for (const auto& client : input->clientsList()) {
+        sp<DeviceDescriptor> device =
+            mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
+                                                 client->session());
+        if (!input->supportedDevices().contains(device)) {
+            return true;
+        }
+    }
+    setInputDevice(input->mIoHandle, getNewInputDevice(input));
+    return false;
+}
+
 void AudioPolicyManager::checkCloseInputs() {
     // After connecting or disconnecting an input device, close input if:
     // - it has no client (was just opened to check profile)  OR
@@ -3238,35 +3540,37 @@
     // devices anymore. Otherwise update device selection
     std::vector<audio_io_handle_t> inputsToClose;
     for (size_t i = 0; i < mInputs.size(); i++) {
-        const sp<AudioInputDescriptor> input = mInputs.valueAt(i);
-        if (input->clientsList().size() == 0
-                || !mAvailableInputDevices.containsAtLeastOne(input->supportedDevices())) {
+        if (checkCloseInput(mInputs.valueAt(i))) {
             inputsToClose.push_back(mInputs.keyAt(i));
-        } else {
-            bool close = false;
-            for (const auto& client : input->clientsList()) {
-                sp<DeviceDescriptor> device =
-                    mEngine->getInputDeviceForAttributes(client->attributes(), client->uid(),
-                                                         client->session());
-                if (!input->supportedDevices().contains(device)) {
-                    close = true;
-                    break;
-                }
-            }
-            if (close) {
-                inputsToClose.push_back(mInputs.keyAt(i));
-            } else {
-                setInputDevice(input->mIoHandle, getNewInputDevice(input));
-            }
         }
     }
-
     for (const audio_io_handle_t handle : inputsToClose) {
         ALOGV("%s closing input %d", __func__, handle);
         closeInput(handle);
     }
 }
 
+status_t AudioPolicyManager::setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                            const char *address __unused,
+                                                            bool enabled,
+                                                            audio_stream_type_t streamToDriveAbs)
+{
+    if (!enabled) {
+        mAbsoluteVolumeDrivingStreams.erase(deviceType);
+        return NO_ERROR;
+    }
+
+    audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
+    if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
+        ALOGW("%s: no attributes for stream %s, bailing out", __func__,
+              toString(streamToDriveAbs).c_str());
+        return BAD_VALUE;
+    }
+
+    mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
+    return NO_ERROR;
+}
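+// A minimal usage sketch (names and the device address are illustrative, not from this change):
+// register AUDIO_STREAM_MUSIC as the stream driving absolute volume on an A2DP sink, then drop
+// the mapping again when the controller goes away. Assumes `apm` is an AudioPolicyManager.
+//
+//   apm->setDeviceAbsoluteVolumeEnabled(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55",
+//                                       true /*enabled*/, AUDIO_STREAM_MUSIC);
+//   ...
+//   apm->setDeviceAbsoluteVolumeEnabled(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP, "00:11:22:33:44:55",
+//                                       false /*enabled*/, AUDIO_STREAM_MUSIC);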
+
 void AudioPolicyManager::initStreamVolume(audio_stream_type_t stream, int indexMin, int indexMax)
 {
     ALOGV("initStreamVolume() stream %d, min %d, max %d", stream , indexMin, indexMax);
@@ -3294,8 +3598,8 @@
         ALOGW("%s: no group for stream %s, bailing out", __func__, toString(stream).c_str());
         return NO_ERROR;
     }
-    ALOGV("%s: stream %s attributes=%s", __func__,
-          toString(stream).c_str(), toString(attributes).c_str());
+    ALOGV("%s: stream %s attributes=%s, index %d , device 0x%X", __func__,
+          toString(stream).c_str(), toString(attributes).c_str(), index, device);
     return setVolumeIndexForAttributes(attributes, index, device);
 }
 
@@ -3454,7 +3758,9 @@
         if (isVolumeConsistentForCalls(vs, {mCallRxSourceClient->sinkDevice()->type()},
                 isVoiceVolSrc, isBtScoVolSrc, __func__)
                 && (isVoiceVolSrc || isBtScoVolSrc)) {
-            setVoiceVolume(index, curves, isVoiceVolSrc, 0);
+            bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+                    !audio_is_ble_out_device(mCallRxSourceClient->sinkDevice()->type());
+            setVoiceVolume(index, curves, voiceVolumeManagedByHost, 0);
         }
     }
 
@@ -3471,8 +3777,8 @@
     bool hasVoice = hasVoiceStream(volumeCurves.getStreamTypes());
     if (((index < volumeCurves.getVolumeIndexMin()) && !(hasVoice && index == 0)) ||
             (index > volumeCurves.getVolumeIndexMax())) {
-        ALOGD("%s: wrong index %d min=%d max=%d", __FUNCTION__, index,
-              volumeCurves.getVolumeIndexMin(), volumeCurves.getVolumeIndexMax());
+        ALOGE("%s: wrong index %d min=%d max=%d, device 0x%X", __FUNCTION__, index,
+              volumeCurves.getVolumeIndexMin(), volumeCurves.getVolumeIndexMax(), device);
         return BAD_VALUE;
     }
     if (!audio_is_output_device(device)) {
@@ -3534,9 +3840,10 @@
     // 1: An offloaded output. If the effect ends up not being offloadable,
     //    AudioFlinger will invalidate the track and the offloaded output
     //    will be closed causing the effect to be moved to a PCM output.
-    // 2: A deep buffer output
-    // 3: The primary output
-    // 4: the first output in the list
+    // 2: A spatializer output if the stereo spatialization feature is enabled
+    // 3: A deep buffer output
+    // 4: The primary output
+    // 5: the first output in the list
 
     DeviceVector devices = mEngine->getOutputDevicesForAttributes(
                 attributes_initializer(AUDIO_USAGE_MEDIA), nullptr, false /*fromCache*/);
@@ -3551,28 +3858,36 @@
 
     while (output == AUDIO_IO_HANDLE_NONE) {
         audio_io_handle_t outputOffloaded = AUDIO_IO_HANDLE_NONE;
+        audio_io_handle_t outputSpatializer = AUDIO_IO_HANDLE_NONE;
         audio_io_handle_t outputDeepBuffer = AUDIO_IO_HANDLE_NONE;
         audio_io_handle_t outputPrimary = AUDIO_IO_HANDLE_NONE;
 
-        for (audio_io_handle_t output : outputs) {
-            sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(output);
+        for (audio_io_handle_t outputLoop : outputs) {
+            sp<SwAudioOutputDescriptor> desc = mOutputs.valueFor(outputLoop);
             if (activeOnly && !desc->isActive(toVolumeSource(AUDIO_STREAM_MUSIC))) {
                 continue;
             }
             ALOGV("selectOutputForMusicEffects activeOnly %d output %d flags 0x%08x",
-                  activeOnly, output, desc->mFlags);
+                  activeOnly, outputLoop, desc->mFlags);
             if ((desc->mFlags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD) != 0) {
-                outputOffloaded = output;
+                outputOffloaded = outputLoop;
+            }
+            if ((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
+                if (SpatializerHelper::isStereoSpatializationFeatureEnabled()) {
+                    outputSpatializer = outputLoop;
+                }
             }
             if ((desc->mFlags & AUDIO_OUTPUT_FLAG_DEEP_BUFFER) != 0) {
-                outputDeepBuffer = output;
+                outputDeepBuffer = outputLoop;
             }
             if ((desc->mFlags & AUDIO_OUTPUT_FLAG_PRIMARY) != 0) {
-                outputPrimary = output;
+                outputPrimary = outputLoop;
             }
         }
         if (outputOffloaded != AUDIO_IO_HANDLE_NONE) {
             output = outputOffloaded;
+        } else if (outputSpatializer != AUDIO_IO_HANDLE_NONE) {
+            output = outputSpatializer;
         } else if (outputDeepBuffer != AUDIO_IO_HANDLE_NONE) {
             output = outputDeepBuffer;
         } else if (outputPrimary != AUDIO_IO_HANDLE_NONE) {
@@ -4099,8 +4414,9 @@
             // As done in setDeviceConnectionState, we could also fix default device issue by
             // preventing the force re-routing in case of default dev that distinguishes on address.
             // Let's give back to engine full device choice decision however.
-            bool forceRouting = !newDevices.isEmpty();
-            if (outputDesc->mUsePreferredMixerAttributes && newDevices != outputDesc->devices()) {
+            bool newDevicesNotEmpty = !newDevices.isEmpty();
+            if (outputDesc->mPreferredAttrInfo != nullptr && newDevices != outputDesc->devices()
+                && newDevicesNotEmpty) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -4108,9 +4424,10 @@
                 continue;
             }
 
-            waitMs = setOutputDevices(__func__, outputDesc, newDevices, forceRouting, delayMs,
-                                       nullptr, !skipDelays /*requiresMuteCheck*/,
-                                      !forceRouting /*requiresVolumeCheck*/, skipDelays);
+            waitMs = setOutputDevices(__func__, outputDesc, newDevices,
+                                      newDevicesNotEmpty /*force*/, delayMs,
+                                      nullptr /*patchHandle*/, !skipDelays /*requiresMuteCheck*/,
+                                      !newDevicesNotEmpty /*requiresVolumeCheck*/, skipDelays);
             // Only apply special touch sound delay once
             delayMs = 0;
         }
@@ -4221,6 +4538,9 @@
             "Engine could not set preferred devices %s for audio source %d role %d",
             dumpAudioDeviceTypeAddrVector(devices).c_str(), audioSource, role);
 
+    if (status == NO_ERROR) {
+        updateInputRouting();
+    }
     return status;
 }
 
@@ -4380,6 +4700,13 @@
 
     dst->appendFormat("\nPolicy Engine dump:\n");
     mEngine->dump(dst);
+
+    dst->appendFormat("\nAbsolute volume devices with driving streams:\n");
+    for (const auto it : mAbsoluteVolumeDrivingStreams) {
+        dst->appendFormat("   - device type: %s, driving stream %d\n",
+                          dumpDeviceTypes({it.first}).c_str(),
+                          mEngine->getVolumeGroupForAttributes(it.second));
+    }
 }
 
 status_t AudioPolicyManager::dump(int fd)
@@ -4545,6 +4872,17 @@
     flags = (audio_output_flags_t)((flags & relevantFlags) | AUDIO_OUTPUT_FLAG_DIRECT);
 
     DeviceVector engineOutputDevices = mEngine->getOutputDevicesForAttributes(*attr);
+    if (std::any_of(engineOutputDevices.begin(), engineOutputDevices.end(),
+            [this, attr](sp<DeviceDescriptor> device) {
+                    return getPreferredMixerAttributesInfo(
+                            device->getId(),
+                            mEngine->getProductStrategyForAttributes(*attr),
+                            true /*activeBitPerfectPreferred*/) != nullptr;
+            })) {
+        // Bit-perfect playback is active on one of the selected devices; direct output is
+        // rejected for the time being.
+        return AUDIO_DIRECT_NOT_SUPPORTED;
+    }
     for (const auto& hwModule : mHwModules) {
         DeviceVector outputDevices = engineOutputDevices;
         // the MSD module checks for different conditions and output devices
@@ -4701,7 +5039,7 @@
         const auto output = mOutputs.valueAt(i);
         if (output->mProfile == profile && output->devices().onlyContainsDevice(deviceDescriptor)) {
             if (output->isConfigurationMatched(mixerAttributes->config, flags)) {
-                output->mUsePreferredMixerAttributes = true;
+                output->mPreferredAttrInfo = mixerAttrInfo;
             } else {
                 for (const auto &client: output->getActiveClients()) {
                     if (client->uid() == uid && client->strategy() == strategy) {
@@ -4723,7 +5061,7 @@
             ALOGE("%s, failed to reopen output with preferred mixer attributes", __func__);
             continue;
         }
-        desc->mUsePreferredMixerAttributes = true;
+        desc->mPreferredAttrInfo = mixerAttrInfo;
     }
 
     return NO_ERROR;
@@ -4739,8 +5077,7 @@
     }
     if (activeBitPerfectPreferred) {
         for (auto [strategy, info] : it->second) {
-            if ((info->getFlags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE
-                && info->getActiveClientCount() != 0) {
+            if (info->isBitPerfect() && info->getActiveClientCount() != 0) {
                 return info;
             }
         }
@@ -4959,7 +5296,7 @@
             new SourceClientDescriptor(
                 portId, uid, attributes, *source, srcDevice, AUDIO_STREAM_PATCH,
                 mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes),
-                true);
+                true, false /*isCallRx*/, false /*isCallTx*/);
     sourceDesc->setPreferredDeviceId(sinkDevice->getId());
 
     status_t status =
@@ -5291,7 +5628,7 @@
                         outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
                         // for volume control, we may need a valid stream
                         srcMixPortConfig.ext.mix.usecase.stream =
-                            (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
+                            (!sourceDesc->isInternal() || sourceDesc->isCallTx()) ?
                                     mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
                                     AUDIO_STREAM_PATCH;
                         patchBuilder.addSource(srcMixPortConfig);
@@ -5540,7 +5877,7 @@
             invalidateStreams(mEngine->getStreamTypesForProductStrategy(ps));
         } else {
             DeviceVector newDevices = getNewOutputDevices(outputDesc, false /*fromCache*/);
-            if (outputDesc->mUsePreferredMixerAttributes && outputDesc->devices() != newDevices) {
+            if (outputDesc->mPreferredAttrInfo != nullptr && outputDesc->devices() != newDevices) {
                 // If the device is using preferred mixer attributes, the output need to reopen
                 // with default configuration when the new selected devices are different from
                 // current routing devices.
@@ -5629,7 +5966,16 @@
 status_t AudioPolicyManager::startAudioSource(const struct audio_port_config *source,
                                               const audio_attributes_t *attributes,
                                               audio_port_handle_t *portId,
-                                              uid_t uid, bool internal)
+                                              uid_t uid) {
+    return startAudioSourceInternal(source, attributes, portId, uid,
+                                    false /*internal*/, false /*isCallRx*/, 0 /*delayMs*/);
+}
+
+status_t AudioPolicyManager::startAudioSourceInternal(const struct audio_port_config *source,
+                                              const audio_attributes_t *attributes,
+                                              audio_port_handle_t *portId,
+                                              uid_t uid, bool internal, bool isCallRx,
+                                              uint32_t delayMs)
 {
     ALOGV("%s", __FUNCTION__);
     *portId = AUDIO_PORT_HANDLE_NONE;
@@ -5662,16 +6008,17 @@
         new SourceClientDescriptor(*portId, uid, *attributes, *source, srcDevice,
                                    mEngine->getStreamTypeForAttributes(*attributes),
                                    mEngine->getProductStrategyForAttributes(*attributes),
-                                   toVolumeSource(*attributes), internal);
+                                   toVolumeSource(*attributes), internal, isCallRx, false);
 
-    status_t status = connectAudioSource(sourceDesc);
+    status_t status = connectAudioSource(sourceDesc, delayMs);
     if (status == NO_ERROR) {
         mAudioSources.add(*portId, sourceDesc);
     }
     return status;
 }
 
-status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
+status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+                                                uint32_t delayMs)
 {
     ALOGV("%s handle %d", __FUNCTION__, sourceDesc->portId());
 
@@ -5697,7 +6044,7 @@
     audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
 
     return connectAudioSourceToSink(
-                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, 0 /*delayMs*/);
+                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, delayMs);
 }
 
 status_t AudioPolicyManager::stopAudioSource(audio_port_handle_t portId)
@@ -5755,7 +6102,8 @@
 float AudioPolicyManager::getStreamVolumeDB(
         audio_stream_type_t stream, int index, audio_devices_t device)
 {
-    return computeVolume(getVolumeCurves(stream), toVolumeSource(stream), index, {device});
+    return computeVolume(getVolumeCurves(stream), toVolumeSource(stream), index,
+                         {device}, /* adjustAttenuation= */false);
 }
 
 status_t AudioPolicyManager::getSurroundFormats(unsigned int *numSurroundFormats,
@@ -6083,12 +6431,10 @@
     // mode is not requested.
 
     if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
-        static const bool stereo_spatialization_enabled =
-                property_get_bool("ro.audio.stereo_spatialization_enabled", false);
         const bool channel_mask_spatialized =
-                (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
-                ? audio_channel_mask_contains_stereo(config->channel_mask)
-                : audio_is_channel_mask_spatialized(config->channel_mask);
+                SpatializerHelper::isStereoSpatializationFeatureEnabled()
+                        ? audio_channel_mask_contains_stereo(config->channel_mask)
+                        : audio_is_channel_mask_spatialized(config->channel_mask);
         if (!channel_mask_spatialized) {
             return false;
         }
@@ -6110,6 +6456,34 @@
     return true;
 }
 
+// The Spatializer output is compatible with haptic use cases if:
+// 1. the Spatializer output thread supports haptics, and format/sampleRate match the client's
+// when the client haptic channel bits are set, or
+// 2. the Spatializer output thread does not support haptics, and the client did not request
+// haptics by including the haptic bits or by creating a HapticGenerator effect for the session.
+bool AudioPolicyManager::checkHapticCompatibilityOnSpatializerOutput(
+        const audio_config_t* config, audio_session_t sessionId) const {
+    const auto clientHapticChannel =
+            audio_channel_count_from_out_mask(config->channel_mask & AUDIO_CHANNEL_HAPTIC_ALL);
+    const auto threadOutputHapticChannel = audio_channel_count_from_out_mask(
+            mSpatializerOutput->getChannelMask() & AUDIO_CHANNEL_HAPTIC_ALL);
+
+    if (threadOutputHapticChannel) {
+        // check that format and sampleRate match if the client haptic channel mask exists
+        if (clientHapticChannel) {
+            return mSpatializerOutput->getFormat() == config->format &&
+                   mSpatializerOutput->getSamplingRate() == config->sample_rate;
+        }
+        return true;
+    } else {
+        // If the Spatializer output channel mask does not have haptic channel bits, haptic use
+        // cases (the client channel mask includes haptic bits, or a HapticGenerator effect was
+        // created for this session) are not supported.
+        return clientHapticChannel == 0 &&
+               !mEffects.hasOrphansForSession(sessionId, FX_IID_HAPTICGENERATOR);
+    }
+}
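+// A brief worked example of the rule above (channel masks, rate and format are illustrative):
+// with a Spatializer output opened as 5.1 | AUDIO_CHANNEL_OUT_HAPTIC_A at 48 kHz PCM,
+//   - a stereo client without haptic bits is compatible (haptics are simply unused),
+//   - a stereo | HAPTIC_A client at 48 kHz with the same format is compatible,
+//   - the same client at 44.1 kHz is not (sample rate mismatch).
+// With a Spatializer output that has no haptic bits, any client with haptic bits, or with an
+// orphan HapticGenerator on its session, is not compatible.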
+
 void AudioPolicyManager::checkVirtualizerClientRoutes() {
     std::set<audio_stream_type_t> streamsToInvalidate;
     for (size_t i = 0; i < mOutputs.size(); i++) {
@@ -6398,13 +6772,23 @@
             if (!mConfig->getOutputDevices().contains(supportedDevice)) {
                 continue;
             }
+
+            if (outProfile->isMmap() && !outProfile->hasDynamicAudioProfile()
+                && availProfileDevices.areAllDevicesAttached()) {
+                ALOGV("%s skip opening output for mmap profile %s", __func__,
+                        outProfile->getTagName().c_str());
+                continue;
+            }
+
             sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
                                                                                  mpClientInterface);
             audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+            audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE;
+            audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
             status_t status = outputDesc->open(nullptr /* halConfig */, nullptr /* mixerConfig */,
                                                DeviceVector(supportedDevice),
                                                AUDIO_STREAM_DEFAULT,
-                                               AUDIO_OUTPUT_FLAG_NONE, &output);
+                                               &flags, &output, attributes);
             if (status != NO_ERROR) {
                 ALOGW("Cannot open output stream for devices %s on hw module %s",
                       supportedDevice->toString().c_str(), hwModule->getName());
@@ -6457,19 +6841,27 @@
                     __func__, inProfile->getTagName().c_str());
                 continue;
             }
-            sp<AudioInputDescriptor> inputDesc =
-                    new AudioInputDescriptor(inProfile, mpClientInterface);
+
+            if (inProfile->isMmap() && !inProfile->hasDynamicAudioProfile()
+                && availProfileDevices.areAllDevicesAttached()) {
+                ALOGV("%s skip opening input for mmap profile %s", __func__,
+                        inProfile->getTagName().c_str());
+                continue;
+            }
+
+            sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(
+                    inProfile, mpClientInterface, false /*isPreemptor*/);
 
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
             status_t status = inputDesc->open(nullptr,
                                               availProfileDevices.itemAt(0),
                                               AUDIO_SOURCE_MIC,
-                                              AUDIO_INPUT_FLAG_NONE,
+                                              (audio_input_flags_t) inProfile->getFlags(),
                                               &input);
             if (status != NO_ERROR) {
-                ALOGW("Cannot open input stream for device %s on hw module %s",
-                      availProfileDevices.toString().c_str(),
-                      hwModule->getName());
+                ALOGW("%s: Cannot open input stream for device %s for profile %s on hw module %s",
+                        __func__, availProfileDevices.toString().c_str(),
+                        inProfile->getTagName().c_str(), hwModule->getName());
                 continue;
             }
             for (const auto &device : availProfileDevices) {
@@ -6494,6 +6886,15 @@
         if ((desc->mFlags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0
                 && !isOutputOnlyAvailableRouteToSomeDevice(desc)) {
             outputsClosed.push_back(desc->mIoHandle);
+            nextAudioPortGeneration();
+            ssize_t index = mAudioPatches.indexOfKey(desc->getPatchHandle());
+            if (index >= 0) {
+                sp<AudioPatch> patchDesc = mAudioPatches.valueAt(index);
+                (void) /*status_t status*/ mpClientInterface->releaseAudioPatch(
+                            patchDesc->getAfHandle(), 0);
+                mAudioPatches.removeItemsAt(index);
+                mpClientInterface->onAudioPatchListUpdate();
+            }
             desc->close();
         }
     }
@@ -6568,8 +6969,8 @@
                 sp<IOProfile> profile = hwModule->getOutputProfiles()[j];
                 if (profile->supportsDevice(device)) {
                     profiles.add(profile);
-                    ALOGV("checkOutputsForDevice(): adding profile %zu from module %s",
-                          j, hwModule->getName());
+                    ALOGV("%s(): adding profile %s from module %s",
+                            __func__, profile->getTagName().c_str(), hwModule->getName());
                 }
             }
         }
@@ -6602,7 +7003,11 @@
             if (j != outputs.size()) {
                 continue;
             }
-
+            if (profile->isMmap() && !profile->hasDynamicAudioProfile()) {
+                ALOGV("%s skip opening output for mmap profile %s",
+                      __func__, profile->getTagName().c_str());
+                continue;
+            }
             if (!profile->canOpenNewIo()) {
                 ALOGW("Max Output number %u already opened for this profile %s",
                       profile->maxOpenCount, profile->getTagName().c_str());
@@ -6663,9 +7068,8 @@
                 if (!profile->supportsDevice(device)) {
                     continue;
                 }
-                ALOGV("checkOutputsForDevice(): "
-                        "clearing direct output profile %zu on module %s",
-                        j, hwModule->getName());
+                ALOGV("%s(): clearing direct output profile %s on module %s",
+                        __func__, profile->getTagName().c_str(), hwModule->getName());
                 profile->clearAudioProfiles();
                 if (!profile->hasDynamicAudioProfile()) {
                     continue;
@@ -6694,14 +7098,14 @@
 status_t AudioPolicyManager::checkInputsForDevice(const sp<DeviceDescriptor>& device,
                                                   audio_policy_dev_state_t state)
 {
-    sp<AudioInputDescriptor> desc;
-
     if (audio_device_is_digital(device->type())) {
         // erase all current sample rates, formats and channel masks
         device->clearAudioProfiles();
     }
 
     if (state == AUDIO_POLICY_DEVICE_STATE_AVAILABLE) {
+        sp<AudioInputDescriptor> desc;
+
         // first call getAudioPort to get the supported attributes from the HAL
         struct audio_port_v7 port = {};
         device->toAudioPort(&port);
@@ -6720,8 +7124,8 @@
 
                 if (profile->supportsDevice(device)) {
                     profiles.add(profile);
-                    ALOGV("checkInputsForDevice(): adding profile %zu from module %s",
-                          profile_index, hwModule->getName());
+                    ALOGV("%s : adding profile %s from module %s", __func__,
+                          profile->getTagName().c_str(), hwModule->getName());
                 }
             }
         }
@@ -6753,15 +7157,22 @@
                 continue;
             }
 
+            if (profile->isMmap() && !profile->hasDynamicAudioProfile()) {
+                ALOGV("%s skip opening input for mmap profile %s",
+                      __func__, profile->getTagName().c_str());
+                continue;
+            }
             if (!profile->canOpenNewIo()) {
-                ALOGW("Max Input number %u already opened for this profile %s",
-                      profile->maxOpenCount, profile->getTagName().c_str());
+                ALOGW("%s Max Input number %u already opened for this profile %s",
+                      __func__, profile->maxOpenCount, profile->getTagName().c_str());
                 continue;
             }
 
-            desc = new AudioInputDescriptor(profile, mpClientInterface);
+            desc = new AudioInputDescriptor(profile, mpClientInterface, false  /*isPreemptor*/);
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC, AUDIO_INPUT_FLAG_NONE, &input);
+            ALOGV("%s opening input for profile %s", __func__, profile->getTagName().c_str());
+            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC,
+                                (audio_input_flags_t) profile->getFlags(), &input);
 
             if (status == NO_ERROR) {
                 const String8& address = String8(device->address().c_str());
@@ -6772,7 +7183,8 @@
                 }
                 updateAudioProfiles(device, input, profile);
                 if (!profile->hasValidAudioProfile()) {
-                    ALOGW("checkInputsForDevice() direct input missing param");
+                    ALOGW("%s direct input missing param for profile %s", __func__,
+                            profile->getTagName().c_str());
                     desc->close();
                     input = AUDIO_IO_HANDLE_NONE;
                 }
@@ -6783,15 +7195,22 @@
             } // endif input != 0
 
             if (input == AUDIO_IO_HANDLE_NONE) {
-                ALOGW("%s could not open input for device %s", __func__,
-                       device->toString().c_str());
+                ALOGW("%s could not open input for device %s on profile %s", __func__,
+                       device->toString().c_str(), profile->getTagName().c_str());
                 profiles.removeAt(profile_index);
                 profile_index--;
             } else {
                 if (audio_device_is_digital(device->type())) {
                     device->importAudioPortAndPickAudioProfile(profile);
                 }
-                ALOGV("checkInputsForDevice(): adding input %d", input);
+                ALOGV("%s: adding input %d for profile %s", __func__,
+                        input, profile->getTagName().c_str());
+
+                if (checkCloseInput(desc)) {
+                    ALOGV("%s: closing input %d for profile %s", __func__,
+                            input, profile->getTagName().c_str());
+                    closeInput(input);
+                }
             }
         } // end scan profiles
 
@@ -6808,8 +7227,8 @@
                  profile_index++) {
                 sp<IOProfile> profile = hwModule->getInputProfiles()[profile_index];
                 if (profile->supportsDevice(device)) {
-                    ALOGV("checkInputsForDevice(): clearing direct input profile %zu on module %s",
-                            profile_index, hwModule->getName());
+                    ALOGV("%s: clearing direct input profile %s on module %s", __func__,
+                            profile->getTagName().c_str(), hwModule->getName());
                     profile->clearAudioProfiles();
                 }
             }
@@ -6874,8 +7293,7 @@
         closingOutput->stop();
     }
     closingOutput->close();
-    if ((closingOutput->getFlags().output & AUDIO_OUTPUT_FLAG_BIT_PERFECT)
-            == AUDIO_OUTPUT_FLAG_BIT_PERFECT) {
+    if (closingOutput->isBitPerfect()) {
         for (const auto device : closingOutput->devices()) {
             device->setPreferredConfig(nullptr);
         }
@@ -6907,6 +7325,10 @@
             setMsdOutputPatches();
         }
     }
+
+    if (closingOutput->mPreferredAttrInfo != nullptr) {
+        closingOutput->mPreferredAttrInfo->resetActiveClient();
+    }
 }
 
 void AudioPolicyManager::closeInput(audio_io_handle_t input)
@@ -6997,8 +7419,8 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc != nullptr && followsSameRouting(attr, sourceDesc->attributes())
                 && sourceDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE
-                && !isCallRxAudioSource(sourceDesc) && !sourceDesc->isInternal()) {
-            connectAudioSource(sourceDesc);
+                && !sourceDesc->isCallRx() && !sourceDesc->isInternal()) {
+            connectAudioSource(sourceDesc, 0 /*delayMs*/);
         }
     }
 }
@@ -7029,28 +7451,35 @@
     std::vector<sp<SwAudioOutputDescriptor>> invalidatedOutputs;
     // take into account dynamic audio policies related changes: if a client is now associated
     // to a different policy mix than at creation time, invalidate corresponding stream
+    // Also invalidate clients on outputs that do not support all of the newly selected devices
+    // for the strategy.
     for (size_t i = 0; i < mPreviousOutputs.size(); i++) {
         const sp<SwAudioOutputDescriptor>& desc = mPreviousOutputs.valueAt(i);
-        if (desc->isDuplicated()) {
+        if (desc->isDuplicated() || desc->getClientCount() == 0) {
             continue;
         }
+
         for (const sp<TrackClientDescriptor>& client : desc->getClientIterable()) {
             if (mEngine->getProductStrategyForAttributes(client->attributes()) != psId) {
                 continue;
             }
+            if (!desc->supportsAllDevices(newDevices)) {
+                invalidatedOutputs.push_back(desc);
+                break;
+            }
             sp<AudioPolicyMix> primaryMix;
             status_t status = mPolicyMixes.getOutputForAttr(client->attributes(), client->config(),
                     client->uid(), client->session(), client->flags(), mAvailableOutputDevices,
                     nullptr /* requestedDevice */, primaryMix, nullptr /* secondaryMixes */,
                     unneededUsePrimaryOutputFromPolicyMixes);
-            if (status != OK) {
-                continue;
-            }
-            if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
-                if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
-                    maxLatency = desc->latency();
+            if (status == OK) {
+                if (client->getPrimaryMix() != primaryMix || client->hasLostPrimaryMix()) {
+                    if (desc->isStrategyActive(psId) && maxLatency < desc->latency()) {
+                        maxLatency = desc->latency();
+                    }
+                    invalidatedOutputs.push_back(desc);
+                    break;
                 }
-                invalidatedOutputs.push_back(desc);
             }
         }
     }
@@ -7104,8 +7533,8 @@
                 }
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
-            if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
-                connectAudioSource(source);
+            if (source != nullptr && !source->isCallRx() && !source->isInternal()) {
+                connectAudioSource(source, 0 /*delayMs*/);
             }
         }
 
@@ -7168,7 +7597,8 @@
                     client->getSecondaryOutputs().begin(),
                     client->getSecondaryOutputs().end(),
                     secondaryDescs.begin(), secondaryDescs.end())) {
-                if (!audio_is_linear_pcm(client->config().format)) {
+                if (client->flags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD
+                        || !audio_is_linear_pcm(client->config().format)) {
                     // If the format is not PCM, the tracks should be invalidated to get correct
                     // behavior when the secondary output is changed.
                     clientsToInvalidate.push_back(client->portId());
@@ -7728,9 +8158,21 @@
                         if (result.source == AUDIO_SOURCE_HOTWORD && !inputDesc->isSoundTrigger()) {
                             result.source = AUDIO_SOURCE_VOICE_RECOGNITION;
                         }
-                        return result; }).
+                        return result; });
             //only one input device for now
-                    addSource(device);
+            if (audio_is_remote_submix_device(device->type())) {
+                // The remote submix HAL does not support audio conversion, so the source device
+                // audio config must match the sink input descriptor audio config; otherwise AIDL
+                // HAL patching will fail.
+                audio_port_config srcDevicePortConfig = {};
+                device->toAudioPortConfig(&srcDevicePortConfig, nullptr);
+                srcDevicePortConfig.sample_rate = inputDesc->getSamplingRate();
+                srcDevicePortConfig.channel_mask = inputDesc->getChannelMask();
+                srcDevicePortConfig.format = inputDesc->getFormat();
+                patchBuilder.addSource(srcDevicePortConfig);
+            } else {
+                patchBuilder.addSource(device);
+            }
             status = installPatch(__func__, patchHandle, inputDesc.get(), patchBuilder.patch(), 0);
         }
     }
@@ -7778,6 +8220,9 @@
 
     for (;;) {
         sp<IOProfile> firstInexact = nullptr;
+        uint32_t inexactSamplingRate = 0;
+        audio_format_t inexactFormat = AUDIO_FORMAT_INVALID;
+        audio_channel_mask_t inexactChannelMask = AUDIO_CHANNEL_INVALID;
         uint32_t updatedSamplingRate = 0;
         audio_format_t updatedFormat = AUDIO_FORMAT_INVALID;
         audio_channel_mask_t updatedChannelMask = AUDIO_CHANNEL_INVALID;
@@ -7815,14 +8260,17 @@
                                 false /*exactMatchRequiredForInputFlags*/)
                                 != IOProfile::NO_MATCH) {
                     firstInexact = profile;
+                    inexactSamplingRate = updatedSamplingRate;
+                    inexactFormat = updatedFormat;
+                    inexactChannelMask = updatedChannelMask;
                 }
             }
         }
 
         if (firstInexact != nullptr) {
-            samplingRate = updatedSamplingRate;
-            format = updatedFormat;
-            channelMask = updatedChannelMask;
+            samplingRate = inexactSamplingRate;
+            format = inexactFormat;
+            channelMask = inexactChannelMask;
             return firstInexact;
         } else if (flags & AUDIO_INPUT_FLAG_RAW) {
             flags = (audio_input_flags_t) (flags & ~AUDIO_INPUT_FLAG_RAW); // retry
@@ -7840,14 +8288,63 @@
     return nullptr;
 }
 
+float AudioPolicyManager::adjustDeviceAttenuationForAbsVolume(IVolumeCurves &curves,
+                                                              VolumeSource volumeSource,
+                                                              int index,
+                                                              const DeviceTypeSet &deviceTypes)
+{
+    audio_devices_t volumeDevice = Volume::getDeviceForVolume(deviceTypes);
+    device_category deviceCategory = Volume::getDeviceCategory({volumeDevice});
+    float volumeDb = curves.volIndexToDb(deviceCategory, index);
+
+    if (com_android_media_audio_abs_volume_index_fix()) {
+        if (mAbsoluteVolumeDrivingStreams.find(volumeDevice) !=
+            mAbsoluteVolumeDrivingStreams.end()) {
+            audio_attributes_t attributesToDriveAbs = mAbsoluteVolumeDrivingStreams[volumeDevice];
+            auto groupToDriveAbs = mEngine->getVolumeGroupForAttributes(attributesToDriveAbs);
+            if (groupToDriveAbs == VOLUME_GROUP_NONE) {
+                ALOGD("%s: no group matching with %s", __FUNCTION__,
+                      toString(attributesToDriveAbs).c_str());
+                return volumeDb;
+            }
+
+            float volumeDbMax = curves.volIndexToDb(deviceCategory, curves.getVolumeIndexMax());
+            VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
+            if (vsToDriveAbs == volumeSource) {
+                // attenuation is applied by the abs volume controller
+                return (index != 0) ? volumeDbMax : volumeDb;
+            } else {
+                IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
+                int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
+                float volumeDbAbs = curvesAbs.volIndexToDb(deviceCategory, indexAbs);
+                float volumeDbAbsMax = curvesAbs.volIndexToDb(deviceCategory,
+                                                              curvesAbs.getVolumeIndexMax());
+                float newVolumeDb = fminf(volumeDb + volumeDbAbsMax - volumeDbAbs, volumeDbMax);
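+                // Worked example (illustrative numbers, not from this change): if the driving
+                // stream sits at -20 dB with a 0 dB max, a stream computed at -30 dB is raised
+                // by 0 - (-20) = 20 dB to -10 dB, then capped at its own curve maximum.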
+                ALOGV("%s: abs vol stream %d with attenuation %f is adjusting stream %d from "
+                      "attenuation %f to attenuation %f %f", __func__, vsToDriveAbs, volumeDbAbs,
+                      volumeSource, volumeDb, newVolumeDb, volumeDbMax);
+                return newVolumeDb;
+            }
+        }
+        return volumeDb;
+    } else {
+        return volumeDb;
+    }
+}
+
 float AudioPolicyManager::computeVolume(IVolumeCurves &curves,
                                         VolumeSource volumeSource,
                                         int index,
                                         const DeviceTypeSet& deviceTypes,
+                                        bool adjustAttenuation,
                                         bool computeInternalInteraction)
 {
-    float volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
-
+    float volumeDb;
+    if (adjustAttenuation) {
+        volumeDb = adjustDeviceAttenuationForAbsVolume(curves, volumeSource, index, deviceTypes);
+    } else {
+        volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
+    }
     ALOGV("%s volume source %d, index %d,  devices %s, compute internal %b ", __func__,
           volumeSource, index, dumpDeviceTypes(deviceTypes).c_str(), computeInternalInteraction);
 
@@ -7868,7 +8365,8 @@
             mOutputs.isActive(ringVolumeSrc, 0)) {
         auto &ringCurves = getVolumeCurves(AUDIO_STREAM_RING);
         const float ringVolumeDb = computeVolume(ringCurves, ringVolumeSrc, index, deviceTypes,
-                /* computeInternalInteraction= */ false);
+                                                 adjustAttenuation,
+                                                 /* computeInternalInteraction= */false);
         return ringVolumeDb - 4 > volumeDb ? ringVolumeDb - 4 : volumeDb;
     }
 
@@ -7886,7 +8384,7 @@
         int voiceVolumeIndex = voiceCurves.getVolumeIndex(deviceTypes);
         const float maxVoiceVolDb =
                 computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, deviceTypes,
-                              /* computeInternalInteraction= */ false)
+                        adjustAttenuation, /* computeInternalInteraction= */false)
                 + IN_CALL_EARPIECE_HEADROOM_DB;
         // FIXME: Workaround for call screening applications until a proper audio mode is defined
         // to support this scenario : Exempt the RING stream from the audio cap if the audio was
@@ -7939,7 +8437,8 @@
                                              musicVolumeSrc,
                                              musicCurves.getVolumeIndex(musicDevice),
                                              musicDevice,
-                                             /* computeInternalInteraction= */ false);
+                                              adjustAttenuation,
+                                              /* computeInternalInteraction= */ false);
             float minVolDb = (musicVolDb > SONIFICATION_HEADSET_VOLUME_MIN_DB) ?
                         musicVolDb : SONIFICATION_HEADSET_VOLUME_MIN_DB;
             if (volumeDb > minVolDb) {
@@ -7948,9 +8447,10 @@
             }
             if (Volume::getDeviceForVolume(deviceTypes) != AUDIO_DEVICE_OUT_SPEAKER
                     &&  !Intersection(deviceTypes, {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
-                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES}).empty()) {
-                // on A2DP, also ensure notification volume is not too low compared to media when
-                // intended to be played
+                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,
+                        AUDIO_DEVICE_OUT_BLE_HEADSET}).empty()) {
+                // on A2DP/BLE, also ensure notification volume is not too low compared to media
+                // when intended to be played.
                 if ((volumeDb > -96.0f) &&
                         (musicVolDb - SONIFICATION_A2DP_MAX_MEDIA_DIFF_DB > volumeDb)) {
                     ALOGV("%s increasing volume for volume source=%d device=%s from %f to %f",
@@ -8002,6 +8502,9 @@
                                                int delayMs,
                                                bool force)
 {
+    // APM is single threaded, and single instance.
+    static std::set<IVolumeCurves*> invalidCurvesReported;
+
     // do not change actual attributes volume if the attributes is muted
     if (outputDesc->isMuted(volumeSource)) {
         ALOGVV("%s: volume source %d muted count %d active=%d", __func__, volumeSource,
@@ -8021,20 +8524,26 @@
     if (deviceTypes.empty()) {
         deviceTypes = outputDesc->devices().types();
         index = curves.getVolumeIndex(deviceTypes);
-        ALOGD("%s if deviceTypes is change from none to device %s, need get index %d",
+        ALOGV("%s if deviceTypes is change from none to device %s, need get index %d",
                 __func__, dumpDeviceTypes(deviceTypes).c_str(), index);
     }
 
     if (curves.getVolumeIndexMin() < 0 || curves.getVolumeIndexMax() < 0) {
-        ALOGE("invalid volume index range");
+        if (!invalidCurvesReported.count(&curves)) {
+            invalidCurvesReported.insert(&curves);
+            String8 dump;
+            curves.dump(&dump);
+            ALOGE("invalid volume index range in the curve:\n%s", dump.c_str());
+        }
         return BAD_VALUE;
     }
 
     float volumeDb = computeVolume(curves, volumeSource, index, deviceTypes);
     if (outputDesc->isFixedVolume(deviceTypes) ||
-            // Force VoIP volume to max for bluetooth SCO device except if muted
+            // Force VoIP volume to max for bluetooth SCO/BLE device except if muted
             (index != 0 && (isVoiceVolSrc || isBtScoVolSrc) &&
-                    isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device))) {
+                    (isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device)
+                    || isSingleDeviceType(deviceTypes, audio_is_ble_out_device)))) {
         volumeDb = 0.0f;
     }
     const bool muted = (index == 0) && (volumeDb != 0.0f);
@@ -8042,17 +8551,19 @@
             deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
-        setVoiceVolume(index, curves, isVoiceVolSrc, delayMs);
+        bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+                !isSingleDeviceType(deviceTypes, audio_is_ble_out_device);
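+        // For BLE outputs the headset applies the attenuation itself, so setVoiceVolume()
+        // below only sends max or mute instead of the scaled index.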
+        setVoiceVolume(index, curves, voiceVolumeManagedByHost, delayMs);
     }
     return NO_ERROR;
 }
 
 void AudioPolicyManager::setVoiceVolume(
-        int index, IVolumeCurves &curves, bool isVoiceVolSrc, int delayMs) {
+        int index, IVolumeCurves &curves, bool voiceVolumeManagedByHost, int delayMs) {
     float voiceVolume;
-    // Force voice volume to max or mute for Bluetooth SCO as other attenuations are managed
+    // Force voice volume to max or mute for Bluetooth SCO/BLE as other attenuations are managed
     // by the headset
-    if (isVoiceVolSrc) {
+    if (voiceVolumeManagedByHost) {
         voiceVolume = (float)index/(float)curves.getVolumeIndexMax();
     } else {
         voiceVolume = index == 0 ? 0.0 : 1.0;
@@ -8069,11 +8580,19 @@
                                                    bool& isBtScoVolSrc,
                                                    const char* caller) {
     const VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
-    const VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
+    isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+
     const bool isScoRequested = isScoRequestedForComm();
     const bool isHAUsed = isHearingAidUsedForComm();
 
-    isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+    if (com_android_media_audio_replace_stream_bt_sco()) {
+        ALOGV("%s stream bt sco is replaced, no volume consistency check for calls", __func__);
+        isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource) &&
+                        (isScoRequested || isHAUsed);
+        return true;
+    }
+
+    const VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
     isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
 
     if ((callVolSrc != btScoVolSrc) &&
@@ -8230,7 +8749,7 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc->isConnected() && (sourceDesc->srcDevice()->equals(deviceDesc) ||
                                           sourceDesc->sinkDevice()->equals(deviceDesc))
-                && !isCallRxAudioSource(sourceDesc)) {
+                && !sourceDesc->isCallRx()) {
             disconnectAudioSource(sourceDesc);
         }
     }
@@ -8516,8 +9035,9 @@
     }
     sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
     status_t status = desc->open(halConfig, mixerConfig, devices,
-            AUDIO_STREAM_DEFAULT, flags, &output);
+            AUDIO_STREAM_DEFAULT, &flags, &output, attributes);
     if (status != NO_ERROR) {
         ALOGE("%s failed to open output %d", __func__, status);
         return nullptr;
@@ -8555,14 +9075,19 @@
         config.offload_info.channel_mask = config.channel_mask;
         config.offload_info.format = config.format;
 
-        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output);
+        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, &flags, &output,
+                            attributes);
         if (status != NO_ERROR) {
             return nullptr;
         }
     }
 
     addOutput(output, desc);
-
+    // The version check is essentially to avoid making this call in the case of the HIDL HAL.
+    if (auto hwModule = mHwModules.getModuleFromHandle(mPrimaryModuleHandle); hwModule &&
+            hwModule->getHalVersionMajor() >= 3) {
+        setOutputDevices(__func__, desc, devices, true, 0, NULL);
+    }
     sp<DeviceDescriptor> speaker = mAvailableOutputDevices.getDevice(
             AUDIO_DEVICE_OUT_SPEAKER, String8(""), AUDIO_FORMAT_DEFAULT);
 
@@ -8613,6 +9138,13 @@
 
 status_t AudioPolicyManager::getDevicesForAttributes(
         const audio_attributes_t &attr, DeviceVector &devices, bool forVolume) {
+    // An attr whose source was set via AudioAttributes.Builder.setCapturePreset() takes
+    // precedence over any usage or content type also present in attr.
+    if (com::android::media::audioserver::enable_audio_input_device_routing() &&
+        attr.source != AUDIO_SOURCE_INVALID) {
+        return getInputDevicesForAttributes(attr, devices);
+    }
+
     // Devices are determined in the following precedence:
     //
     // 1) Devices associated with a dynamic policy matching the attributes.  This is often
@@ -8676,6 +9208,15 @@
     return NO_ERROR;
 }
 
+status_t AudioPolicyManager::getInputDevicesForAttributes(
+        const audio_attributes_t &attr, DeviceVector &devices) {
+    devices = DeviceVector(
+            mEngine->getInputDeviceForAttributes(attr, 0 /*uid unknown here*/,
+                                                 AUDIO_SESSION_NONE,
+                                                 nullptr /* mix */));
+    return NO_ERROR;
+}
+
 status_t AudioPolicyManager::getProfilesForDevices(const DeviceVector& devices,
                                                    AudioProfileVector& audioProfiles,
                                                    uint32_t flags,
@@ -8760,4 +9301,60 @@
     mpClientInterface->invalidateTracks(clients);
 }
 
+void AudioPolicyManager::updateClientsInternalMute(
+        const sp<android::SwAudioOutputDescriptor> &desc) {
+    if (!desc->isBitPerfect() ||
+        !com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        // This is only used for bit perfect output now.
+        return;
+    }
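+    // Summary of the policy below (derived from the code): concurrent system sounds are
+    // internally muted so the bit-perfect client keeps the output to itself, while any other
+    // concurrent client causes the bit-perfect client itself to be internally muted.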
+    sp<TrackClientDescriptor> bitPerfectClient = nullptr;
+    bool bitPerfectClientInternalMute = false;
+    std::vector<media::TrackInternalMuteInfo> clientsInternalMute;
+    for (const sp<TrackClientDescriptor>& client : desc->getActiveClients()) {
+        if ((client->flags() & AUDIO_OUTPUT_FLAG_BIT_PERFECT) != AUDIO_OUTPUT_FLAG_NONE) {
+            bitPerfectClient = client;
+            continue;
+        }
+        bool muted = false;
+        if (client->stream() == AUDIO_STREAM_SYSTEM) {
+            // System sound is muted.
+            muted = true;
+        } else {
+            bitPerfectClientInternalMute = true;
+        }
+        if (client->setInternalMute(muted)) {
+            auto result = legacy2aidl_audio_port_handle_t_int32_t(client->portId());
+            if (!result.ok()) {
+                ALOGE("%s, failed to convert port id(%d) to aidl", __func__, client->portId());
+                continue;
+            }
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = client->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        }
+    }
+    if (bitPerfectClient != nullptr &&
+        bitPerfectClient->setInternalMute(bitPerfectClientInternalMute)) {
+        auto result = legacy2aidl_audio_port_handle_t_int32_t(bitPerfectClient->portId());
+        if (result.ok()) {
+            media::TrackInternalMuteInfo info;
+            info.portId = result.value();
+            info.muted = bitPerfectClient->getInternalMute();
+            clientsInternalMute.push_back(std::move(info));
+        } else {
+            ALOGE("%s, failed to convert port id(%d) of bit perfect client to aidl",
+                  __func__, bitPerfectClient->portId());
+        }
+    }
+    if (!clientsInternalMute.empty()) {
+        if (status_t status = mpClientInterface->setTracksInternalMute(clientsInternalMute);
+                status != NO_ERROR) {
+            ALOGE("%s, failed to update tracks internal mute, err=%d", __func__, status);
+        }
+    }
+}
+
 } // namespace android
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 50e8ed8..9ad2ea5 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -103,7 +103,7 @@
         virtual status_t setDeviceConnectionState(audio_policy_dev_state_t state,
                 const android::media::audio::common::AudioPort& port, audio_format_t encodedFormat);
         virtual audio_policy_dev_state_t getDeviceConnectionState(audio_devices_t device,
-                                                                              const char *device_address);
+                                                                  const char *device_address);
         virtual status_t handleDeviceConfigChange(audio_devices_t device,
                                                   const char *device_address,
                                                   const char *device_name,
@@ -128,7 +128,8 @@
                                   std::vector<audio_io_handle_t> *secondaryOutputs,
                                   output_type_t *outputType,
                                   bool *isSpatialized,
-                                  bool *isBitPerfect) override;
+                                  bool *isBitPerfect,
+                                  float *volume) override;
         virtual status_t startOutput(audio_port_handle_t portId);
         virtual status_t stopOutput(audio_port_handle_t portId);
         virtual bool releaseOutput(audio_port_handle_t portId);
@@ -151,6 +152,10 @@
         virtual status_t stopInput(audio_port_handle_t portId);
         virtual void releaseInput(audio_port_handle_t portId);
         virtual void checkCloseInputs();
+        virtual status_t setDeviceAbsoluteVolumeEnabled(audio_devices_t deviceType,
+                                                        const char *address,
+                                                        bool enabled,
+                                                        audio_stream_type_t streamToDriveAbs);
         /**
          * @brief initStreamVolume: even if the engine volume files provides min and max, keep this
          * api for compatibility reason.
@@ -341,8 +346,7 @@
         virtual status_t startAudioSource(const struct audio_port_config *source,
                                           const audio_attributes_t *attributes,
                                           audio_port_handle_t *portId,
-                                          uid_t uid,
-                                          bool internal = false);
+                                          uid_t uid);
         virtual status_t stopAudioSource(audio_port_handle_t portId);
 
         virtual status_t setMasterMono(bool mono);
@@ -529,6 +533,7 @@
         void addOutput(audio_io_handle_t output, const sp<SwAudioOutputDescriptor>& outputDesc);
         void removeOutput(audio_io_handle_t output);
         void addInput(audio_io_handle_t input, const sp<AudioInputDescriptor>& inputDesc);
+        bool checkCloseInput(const sp<AudioInputDescriptor>& input);
 
         /**
          * @brief setOutputDevices change the route of the specified output.
@@ -586,6 +591,8 @@
          * @param index index to match in the volume curves for the calculation
          * @param deviceTypes devices that should be considered in the volume curves for the
          *        calculation
+         * @param adjustAttenuation boolean indicating whether the value should be adjusted to
+         *        avoid double attenuation when controlling an absolute volume (e.g. AVRCP) device
          * @param computeInternalInteraction boolean indicating whether recursive volume computation
          *        should continue within the volume computation. Defaults to {@code true} so the
          *        volume interactions can be computed. Calls within the method should always set the
@@ -594,6 +601,7 @@
          */
         virtual float computeVolume(IVolumeCurves &curves, VolumeSource volumeSource,
                                int index, const DeviceTypeSet& deviceTypes,
+                               bool adjustAttenuation = true,
                                bool computeInternalInteraction = true);
 
         // rescale volume index from srcStream within range of dstStream
@@ -700,15 +708,7 @@
         void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0,
                 bool skipDelays = false);
 
-        bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
-        }
-
-        bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
-        }
-
-        void connectTelephonyRxAudioSource();
+        void connectTelephonyRxAudioSource(uint32_t delayMs);
 
         void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
 
@@ -933,7 +933,8 @@
 
         status_t hasPrimaryOutput() const { return mPrimaryOutput != 0; }
 
-        status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
+        status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+                                    uint32_t delayMs);
         status_t disconnectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
 
         status_t connectAudioSourceToSink(const sp<SourceClientDescriptor>& sourceDesc,
@@ -971,6 +972,13 @@
         void checkLeBroadcastRoutes(bool wasUnicastActive,
                 sp<SwAudioOutputDescriptor> ignoredOutput, uint32_t delayMs);
 
+        status_t startAudioSourceInternal(const struct audio_port_config *source,
+                                          const audio_attributes_t *attributes,
+                                          audio_port_handle_t *portId,
+                                          uid_t uid,
+                                          bool internal,
+                                          bool isCallRx,
+                                          uint32_t delayMs);
         const uid_t mUidCached;                         // AID_AUDIOSERVER
         sp<const AudioPolicyConfig> mConfig;
         EngineInstance mEngine;                         // Audio Policy Engine instance
@@ -1101,8 +1109,8 @@
         // It can give a chance to HAL implementer to retrieve dynamic capabilities associated
         // to this device for example.
         // TODO avoid opening stream to retrieve capabilities of a profile.
-        void broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
-                                            media::DeviceConnectedState state);
+        status_t broadcastDeviceConnectionState(const sp<DeviceDescriptor> &device,
+                                                media::DeviceConnectedState state);
 
         // updates device caching and output for streams that can influence the
         //    routing of notifications
@@ -1148,7 +1156,8 @@
                 const audio_config_t *config,
                 audio_output_flags_t flags,
                 const DeviceVector &devices,
-                audio_io_handle_t *output);
+                audio_io_handle_t *output,
+                audio_attributes_t attributes);
 
         /**
          * @brief Queries if some kind of spatialization will be performed if the audio playback
@@ -1357,6 +1366,11 @@
                                          DeviceVector &devices,
                                          bool forVolume);
 
+        // A helper method used by getDevicesForAttributes to retrieve input devices when
+        // capture preset is available in the given audio attributes parameter.
+        status_t getInputDevicesForAttributes(const audio_attributes_t &attr,
+                                              DeviceVector &devices);
+
         status_t getProfilesForDevices(const DeviceVector& devices,
                                        AudioProfileVector& audioProfiles,
                                        uint32_t flags,
@@ -1383,6 +1397,20 @@
 
         PortHandleVector getClientsForStream(audio_stream_type_t streamType) const;
         void invalidateStreams(StreamTypeVector streams) const;
+
+        bool checkHapticCompatibilityOnSpatializerOutput(const audio_config_t* config,
+                                                         audio_session_t sessionId) const;
+
+        void updateClientsInternalMute(const sp<SwAudioOutputDescriptor>& desc);
+
+        float adjustDeviceAttenuationForAbsVolume(IVolumeCurves &curves,
+                                                  VolumeSource volumeSource,
+                                                  int index,
+                                                  const DeviceTypeSet &deviceTypes);
+
+        // For each device that supports absolute volume, the audio attributes of the stream
+        // that drives its volume changes.
+        std::unordered_map<audio_devices_t, audio_attributes_t> mAbsoluteVolumeDrivingStreams;
 };
 
 };
diff --git a/services/audiopolicy/permission/Android.bp b/services/audiopolicy/permission/Android.bp
new file mode 100644
index 0000000..cfbeaae
--- /dev/null
+++ b/services/audiopolicy/permission/Android.bp
@@ -0,0 +1,111 @@
+package {
+    default_team: "trendy_team_android_media_audio_framework",
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+cc_library_headers {
+    name: "audiopermissioncontroller_headers",
+    host_supported: true,
+    export_include_dirs: ["include"],
+}
+
+cc_library {
+    name: "audiopermissioncontroller",
+
+    srcs: [
+        "NativePermissionController.cpp",
+        "ValidatedAttributionSourceState.cpp",
+    ],
+    export_include_dirs: [
+        "include",
+    ],
+
+    header_libs: [
+        "libcutils_headers",
+        "liberror_headers",
+    ],
+    export_header_lib_headers: [
+        "liberror_headers",
+    ],
+    static_libs: [
+        "audio-permission-aidl-cpp",
+        "framework-permission-aidl-cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libutils",
+    ],
+
+    host_supported: true,
+    sanitize: {
+        integer_overflow: true,
+    },
+    cflags: [
+        "-DANDROID_BASE_UNIQUE_FD_DISABLE_IMPLICIT_CONVERSION",
+        "-DANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION",
+        "-Wall",
+        "-Wconditional-uninitialized",
+        "-Wdeprecated",
+        "-Werror",
+        "-Werror=format",
+        "-Werror=reorder-init-list",
+        "-Wextra",
+        "-Wextra-semi",
+        "-Wimplicit-fallthrough",
+        "-Wreorder-init-list",
+        "-Wshadow-all",
+        "-Wthread-safety",
+        "-Wunreachable-code-aggressive",
+    ],
+    tidy: true,
+    tidy_checks: [
+        "android-*",
+        "bugprone-*",
+        "cert-*",
+        "clang-analyzer-security*",
+        "google-*",
+        "misc-*",
+        "modernize-*",
+        "performance-*",
+    ],
+    tidy_checks_as_errors: [
+        "android-*",
+        "bugprone-*",
+        "cert-*",
+        "clang-analyzer-security*",
+        "google-*",
+        "misc-*",
+        "modernize-*",
+        "performance-*",
+    ],
+}
+
+cc_test {
+    name: "audiopermissioncontroller_test",
+    host_supported: true,
+    defaults: [
+        "libmediautils_tests_config",
+    ],
+    static_libs: [
+        "audio-permission-aidl-cpp",
+        "audiopermissioncontroller",
+        "framework-permission-aidl-cpp",
+        "libgmock",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder",
+        "liblog",
+        "libutils",
+    ],
+    srcs: [
+        "tests/NativePermissionControllerTest.cpp",
+        "tests/ValidatedAttributionSourceStateTest.cpp",
+    ],
+    test_options: {
+        unit_test: true,
+    },
+    test_suites: ["general-tests"],
+}
diff --git a/services/audiopolicy/permission/NativePermissionController.cpp b/services/audiopolicy/permission/NativePermissionController.cpp
new file mode 100644
index 0000000..5743076
--- /dev/null
+++ b/services/audiopolicy/permission/NativePermissionController.cpp
@@ -0,0 +1,160 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/NativePermissionController.h>
+
+#include <algorithm>
+#include <optional>
+#include <utility>
+
+#include <android-base/expected.h>
+#include <cutils/android_filesystem_config.h>
+#include <utils/Errors.h>
+
+using ::android::binder::Status;
+using ::android::error::BinderResult;
+using ::android::error::unexpectedExceptionCode;
+
+namespace com::android::media::permission {
+static std::optional<std::string> getFixedPackageName(uid_t uid) {
+    // These values are in sync with AppOpsService
+    switch (uid % AID_USER_OFFSET) {
+        case AID_ROOT:
+            return "root";
+        case AID_SYSTEM:
+            return "system";
+        case AID_SHELL:
+            return "shell";
+        case AID_MEDIA:
+            return "media";
+        case AID_AUDIOSERVER:
+            return "audioserver";
+        case AID_CAMERASERVER:
+            return "cameraserver";
+        default:
+            return std::nullopt;
+    }
+}
+
+// -- Begin Binder methods
+Status NativePermissionController::populatePackagesForUids(
+        const std::vector<UidPackageState>& initialPackageStates) {
+    std::lock_guard l{m_};
+    is_package_populated_ = true;
+    package_map_.clear();
+    std::transform(initialPackageStates.begin(), initialPackageStates.end(),
+                   std::inserter(package_map_, package_map_.end()),
+                   [](const auto& x) -> std::pair<uid_t, std::vector<std::string>> {
+                       return {x.uid, x.packageNames};
+                   });
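+    // Uids left with an empty package list are dropped so that getPackagesForUid() reports
+    // EX_ILLEGAL_ARGUMENT for them instead of returning an empty (invalid) list.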
+    std::erase_if(package_map_, [](const auto& x) { return x.second.empty(); });
+    return Status::ok();
+}
+
+Status NativePermissionController::updatePackagesForUid(const UidPackageState& newPackageState) {
+    std::lock_guard l{m_};
+    if (newPackageState.packageNames.empty()) {
+        // An empty package list means the uid no longer exists: drop any stale entry.
+        package_map_.erase(newPackageState.uid);
+    } else {
+        package_map_.insert_or_assign(newPackageState.uid, newPackageState.packageNames);
+    }
+    return Status::ok();
+}
+
+Status NativePermissionController::populatePermissionState(PermissionEnum perm,
+                                                           const std::vector<int>& uids) {
+    if (perm >= PermissionEnum::ENUM_SIZE || static_cast<int>(perm) < 0) {
+        return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT);
+    }
+    std::lock_guard l{m_};
+    auto& cursor = permission_map_[static_cast<size_t>(perm)];
+    cursor = std::vector<uid_t>{uids.begin(), uids.end()};
+    // keep the uid list sorted: checkPermission() relies on std::binary_search over it
+    std::sort(cursor.begin(), cursor.end());
+    return Status::ok();
+}
+
+// -- End Binder methods
+
+BinderResult<std::vector<std::string>> NativePermissionController::getPackagesForUid(
+        uid_t uid) const {
+    uid = uid % AID_USER_OFFSET;
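+    // Reduce to the app-id: package names are shared across users, so the per-user offset is
+    // irrelevant for the lookup.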
+    const auto fixed_package_opt = getFixedPackageName(uid);
+    if (fixed_package_opt.has_value()) {
+        return BinderResult<std::vector<std::string>>{std::in_place_t{},
+                                                      {fixed_package_opt.value()}};
+    }
+    std::lock_guard l{m_};
+    if (!is_package_populated_) {
+        return unexpectedExceptionCode(
+                Status::EX_ILLEGAL_STATE,
+                "NPC::getPackagesForUid: controller never populated by system_server");
+    }
+    const auto cursor = package_map_.find(uid);
+    if (cursor != package_map_.end()) {
+        return cursor->second;
+    } else {
+        return unexpectedExceptionCode(
+                Status::EX_ILLEGAL_ARGUMENT,
+                ("NPC::getPackagesForUid: uid not found: " + std::to_string(uid)).c_str());
+    }
+}
+
+BinderResult<bool> NativePermissionController::validateUidPackagePair(
+        uid_t uid, const std::string& packageName) const {
+    if (uid == AID_ROOT || uid == AID_SYSTEM) return true;
+    uid = uid % AID_USER_OFFSET;
+    const auto fixed_package_opt = getFixedPackageName(uid);
+    if (fixed_package_opt.has_value()) {
+        return (uid == AID_ROOT || uid == AID_SYSTEM) ? true :
+                packageName == fixed_package_opt.value();
+    }
+    std::lock_guard l{m_};
+    if (!is_package_populated_) {
+        return unexpectedExceptionCode(
+                Status::EX_ILLEGAL_STATE,
+                "NPC::validatedUidPackagePair: controller never populated by system_server");
+    }
+    const auto cursor = package_map_.find(uid);
+    return (cursor != package_map_.end()) &&
+           (std::find(cursor->second.begin(), cursor->second.end(), packageName) !=
+            cursor->second.end());
+}
+
+BinderResult<bool> NativePermissionController::checkPermission(PermissionEnum perm,
+                                                               uid_t uid) const {
+    if (uid == AID_ROOT || uid == AID_SYSTEM || uid == getuid()) return true;
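+    // Anything other than root, system, and the service's own uid must appear in the uid list
+    // populated by system_server via populatePermissionState().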
+    std::lock_guard l{m_};
+    const auto& uids = permission_map_[static_cast<size_t>(perm)];
+    if (!uids.empty()) {
+        return std::binary_search(uids.begin(), uids.end(), uid);
+    } else {
+        return unexpectedExceptionCode(
+                Status::EX_ILLEGAL_STATE,
+                "NPC::checkPermission: controller never populated by system_server");
+    }
+}
+
+}  // namespace com::android::media::permission
diff --git a/services/audiopolicy/permission/ValidatedAttributionSourceState.cpp b/services/audiopolicy/permission/ValidatedAttributionSourceState.cpp
new file mode 100644
index 0000000..f313422
--- /dev/null
+++ b/services/audiopolicy/permission/ValidatedAttributionSourceState.cpp
@@ -0,0 +1,61 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/ValidatedAttributionSourceState.h>
+
+#include <binder/IPCThreadState.h>
+#include <error/expected_utils.h>
+#include <utils/Log.h>
+
+using ::android::binder::Status;
+using ::android::error::BinderResult;
+using ::android::error::unexpectedExceptionCode;
+
+namespace com::android::media::permission {
+
+BinderResult<ValidatedAttributionSourceState>
+ValidatedAttributionSourceState::createFromBinderContext(AttributionSourceState attr,
+                                                         const IPermissionProvider& provider) {
+    attr.pid = ::android::IPCThreadState::self()->getCallingPid();
+    attr.uid = ::android::IPCThreadState::self()->getCallingUid();
+    return createFromTrustedUidNoPackage(std::move(attr), provider);
+}
+
+BinderResult<ValidatedAttributionSourceState>
+ValidatedAttributionSourceState::createFromTrustedUidNoPackage(
+        AttributionSourceState attr, const IPermissionProvider& provider) {
+    if (attr.packageName.has_value() && attr.packageName->size() != 0) {
+        if (VALUE_OR_RETURN(provider.validateUidPackagePair(attr.uid, attr.packageName.value()))) {
+            return ValidatedAttributionSourceState{std::move(attr)};
+        } else {
+            return unexpectedExceptionCode(Status::EX_SECURITY,
+                                           attr.toString()
+                                                   .insert(0, ": invalid attr ")
+                                                   .insert(0, __PRETTY_FUNCTION__)
+                                                   .c_str());
+        }
+    } else {
+        // For APIs which don't appropriately pass attribution sources or packages, we need
+        // to populate the package name with our best guess.
+        const auto packageNames = VALUE_OR_RETURN(provider.getPackagesForUid(attr.uid));
+        LOG_ALWAYS_FATAL_IF(packageNames.empty(), "%s BUG: empty package list from controller",
+                            __PRETTY_FUNCTION__);
+        attr.packageName = std::move(packageNames[0]);
+        return ValidatedAttributionSourceState{std::move(attr)};
+    }
+}
+
+}  // namespace com::android::media::permission
diff --git a/services/audiopolicy/permission/include/media/IPermissionProvider.h b/services/audiopolicy/permission/include/media/IPermissionProvider.h
new file mode 100644
index 0000000..8d90543
--- /dev/null
+++ b/services/audiopolicy/permission/include/media/IPermissionProvider.h
@@ -0,0 +1,50 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <sys/types.h>
+
+#include <optional>
+#include <vector>
+
+#include <com/android/media/permission/PermissionEnum.h>
+#include <error/BinderResult.h>
+
+namespace com::android::media::permission {
+
+class IPermissionProvider {
+  public:
+    // Get all package names which run under a certain app-id. Returns non-empty.
+    // Not user specific, since packages are across users. Special app-ids (system,
+    // shell, etc.) are handled.  Fails if the provider does not know about the
+    // app-id or if the provider has not been initialized.
+    virtual ::android::error::BinderResult<std::vector<std::string>> getPackagesForUid(
+            uid_t uid) const = 0;
+    // True iff the provided package name runs under the app-id of uid.
+    // Special app-ids (system, shell, etc.) are handled.
+    // Fails if the provider does not know about the app-id or if the provider has not been
+    // initialized.
+    virtual ::android::error::BinderResult<bool> validateUidPackagePair(
+            uid_t uid, const std::string& packageName) const = 0;
+
+    // True iff the uid holds the permission (user aware).
+    // Fails with NO_INIT if cache hasn't been populated.
+    virtual ::android::error::BinderResult<bool> checkPermission(PermissionEnum permission,
+                                                                 uid_t uid) const = 0;
+    virtual ~IPermissionProvider() = default;
+};
+}  // namespace com::android::media::permission
diff --git a/services/audiopolicy/permission/include/media/NativePermissionController.h b/services/audiopolicy/permission/include/media/NativePermissionController.h
new file mode 100644
index 0000000..a81c7a2
--- /dev/null
+++ b/services/audiopolicy/permission/include/media/NativePermissionController.h
@@ -0,0 +1,56 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <mutex>
+#include <optional>
+#include <unordered_map>
+
+#include "IPermissionProvider.h"
+
+#include <android-base/thread_annotations.h>
+#include <com/android/media/permission/BnNativePermissionController.h>
+#include <error/BinderResult.h>
+
+namespace com::android::media::permission {
+
+class NativePermissionController : public BnNativePermissionController, public IPermissionProvider {
+    using Status = ::android::binder::Status;
+
+  public:
+    Status populatePackagesForUids(const std::vector<UidPackageState>& initialPackageStates) final;
+    Status updatePackagesForUid(const UidPackageState& newPackageState) final;
+    Status populatePermissionState(PermissionEnum permission, const std::vector<int>& uids) final;
+    // end binder methods
+
+    ::android::error::BinderResult<std::vector<std::string>> getPackagesForUid(
+            uid_t uid) const final;
+    ::android::error::BinderResult<bool> validateUidPackagePair(
+            uid_t uid, const std::string& packageName) const final;
+    ::android::error::BinderResult<bool> checkPermission(PermissionEnum permission,
+                                                         uid_t uid) const final;
+
+  private:
+    mutable std::mutex m_;
+    // map of app_ids to the set of package names which could run in them (should be 1)
+    std::unordered_map<uid_t, std::vector<std::string>> package_map_ GUARDED_BY(m_);
+    bool is_package_populated_ GUARDED_BY(m_) = false;
+    // (logical) map of PermissionEnum to list of uids (not appid) which hold the perm
+    std::array<std::vector<uid_t>, static_cast<size_t>(PermissionEnum::ENUM_SIZE)> permission_map_
+            GUARDED_BY(m_);
+};
+}  // namespace com::android::media::permission
diff --git a/services/audiopolicy/permission/include/media/ValidatedAttributionSourceState.h b/services/audiopolicy/permission/include/media/ValidatedAttributionSourceState.h
new file mode 100644
index 0000000..46f7d0a
--- /dev/null
+++ b/services/audiopolicy/permission/include/media/ValidatedAttributionSourceState.h
@@ -0,0 +1,72 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/content/AttributionSourceState.h>
+#include <error/BinderResult.h>
+
+#include "IPermissionProvider.h"
+
+namespace com::android::media::permission {
+
+using ::android::content::AttributionSourceState;
+
+class ValidatedAttributionSourceState {
+  public:
+    /**
+     * Validates an attribution source from within the context of a binder transaction.
+     * Overwrites the uid/pid and validates the packageName.
+     * Returns EX_SECURITY on package validation fail.
+     */
+    static ::android::error::BinderResult<ValidatedAttributionSourceState> createFromBinderContext(
+            AttributionSourceState attr, const IPermissionProvider& provider);
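+    //
+    // Hedged usage sketch (names illustrative, not part of this change): a helper that itself
+    // returns a BinderResult might write
+    //   auto attr = VALUE_OR_RETURN(ValidatedAttributionSourceState::createFromBinderContext(
+    //           std::move(callerAttr), provider));
+    // using VALUE_OR_RETURN from error/expected_utils.h to propagate the error Status.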
+
+    /**
+     * Creates a ValidatedAttributionSourceState in cases where the source is passed from a
+     * trusted entity which already performed validation.
+     */
+    static ValidatedAttributionSourceState createFromTrustedSource(AttributionSourceState attr) {
+        return ValidatedAttributionSourceState(attr);
+    }
+
+    /**
+     * Create a ValidatedAttributionSourceState in cases where the uid/pid is trusted, but the
+     * packages have not been validated. Proper use of the previous two methods should avoid the
+     * necessity of this, but it is useful for migration purposes as well as testing this class.
+     * Returns EX_SECURITY on package validation fail.
+     */
+    static ::android::error::BinderResult<ValidatedAttributionSourceState>
+    createFromTrustedUidNoPackage(AttributionSourceState attr, const IPermissionProvider& provider);
+
+    operator AttributionSourceState() const { return state_; }
+
+    operator const AttributionSourceState&() const { return state_; }
+
+    AttributionSourceState unwrapInto() && { return std::move(state_); }
+
+    bool operator==(const ValidatedAttributionSourceState& other) const {
+        return operator==(other.state_);
+    }
+
+    bool operator==(const AttributionSourceState& other) const { return state_ == other; }
+
+  private:
+    ValidatedAttributionSourceState(AttributionSourceState attr) : state_(attr) {}
+
+    AttributionSourceState state_;
+};
+}  // namespace com::android::media::permission
diff --git a/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp b/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp
new file mode 100644
index 0000000..f2423c1
--- /dev/null
+++ b/services/audiopolicy/permission/tests/NativePermissionControllerTest.cpp
@@ -0,0 +1,211 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/NativePermissionController.h>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <error/BinderStatusMatcher.h>
+#include <error/ExpectedMatchers.h>
+
+using android::binder::Status::EX_ILLEGAL_ARGUMENT;
+using android::binder::Status::EX_ILLEGAL_STATE;
+using android::error::BinderStatusMatcher;
+using android::error::IsErrorAnd;
+using android::error::IsOkAnd;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::PermissionEnum;
+using com::android::media::permission::UidPackageState;
+
+using ::testing::ElementsAre;
+using ::testing::IsFalse;
+using ::testing::IsTrue;
+
+class NativePermissionControllerTest : public ::testing::Test {
+  protected:
+    android::sp<NativePermissionController> holder_ =
+            android::sp<NativePermissionController>::make();
+    NativePermissionController& controller_ = *holder_;
+};
+static UidPackageState createState(uid_t uid, std::vector<std::string> packagesNames) {
+    UidPackageState out{};
+    out.uid = uid;
+    out.packageNames = std::move(packagesNames);
+    return out;
+}
+
+// ---  Tests for non-populated ----
+TEST_F(NativePermissionControllerTest, getPackagesForUid_NotPopulated) {
+    // Verify errors are returned
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+    EXPECT_THAT(controller_.getPackagesForUid(10001),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+
+    // fixed uids should work
+    EXPECT_THAT(controller_.getPackagesForUid(1000), IsOkAnd(ElementsAre(std::string{"system"})));
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_NotPopulated) {
+    // Verify errors are returned
+    EXPECT_THAT(controller_.validateUidPackagePair(10000, "com.package"),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+
+    // fixed uids should work
+    EXPECT_THAT(controller_.validateUidPackagePair(1000, "system"), IsOkAnd(IsTrue()));
+}
+
+// ---  Tests for populatePackagesForUids ----
+TEST_F(NativePermissionControllerTest, populatePackages_EmptyInput) {
+    std::vector<UidPackageState> input;
+
+    // succeeds
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    // Verify unknown uid behavior
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT)));
+}
+
+TEST_F(NativePermissionControllerTest, populatePackages_ValidInput) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+            createState(10001, {"com.example2.app1"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsOkAnd(ElementsAre("com.example.app1", "com.example.app2")));
+    EXPECT_THAT(controller_.getPackagesForUid(10001), IsOkAnd(ElementsAre("com.example2.app1")));
+}
+
+// --- Tests for updatePackagesForUid ---
+TEST_F(NativePermissionControllerTest, updatePackages_NewUid) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+            createState(10001, {"com.example2.app1"}),
+    };
+    UidPackageState newState = createState(12000, {"com.example.other"});
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+    EXPECT_THAT(controller_.updatePackagesForUid(newState), BinderStatusMatcher::isOk());
+
+    // Verify the results: only the updated package should be changed
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsOkAnd(ElementsAre("com.example.app1", "com.example.app2")));
+    EXPECT_THAT(controller_.getPackagesForUid(10001), IsOkAnd(ElementsAre("com.example2.app1")));
+    EXPECT_THAT(controller_.getPackagesForUid(12000), IsOkAnd(ElementsAre("com.example.other")));
+}
+
+TEST_F(NativePermissionControllerTest, updatePackages_ExistingUid) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2", "com.example.app3"}),
+            createState(10001, {"com.example2.app1"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+    // Update packages for existing uid
+    UidPackageState newState = createState(10000, {"com.example.other", "com.example.new"});
+    EXPECT_THAT(controller_.updatePackagesForUid(newState), BinderStatusMatcher::isOk());
+
+    // Verify update
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsOkAnd(ElementsAre("com.example.other", "com.example.new")));
+}
+
+TEST_F(NativePermissionControllerTest, updatePackages_EmptyRemovesEntry) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    UidPackageState newState{};  // Empty package list
+    newState.uid = 10000;
+    EXPECT_THAT(controller_.updatePackagesForUid(newState), BinderStatusMatcher::isOk());
+    // getPackages for unknown UID should error out
+    EXPECT_THAT(controller_.getPackagesForUid(10000),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT)));
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_ValidPair) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.validateUidPackagePair(10000, "com.example.app1"), IsOkAnd(IsTrue()));
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_InvalidPackage) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.validateUidPackagePair(10000, "com.example.other"), IsOkAnd(IsFalse()));
+}
+
+TEST_F(NativePermissionControllerTest, validateUidPackagePair_UnknownUid) {
+    std::vector<UidPackageState> input{
+            createState(10000, {"com.example.app1", "com.example.app2"}),
+    };
+
+    EXPECT_THAT(controller_.populatePackagesForUids(input), BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.validateUidPackagePair(12000, "any.package"), IsOkAnd(IsFalse()));
+}
+
+TEST_F(NativePermissionControllerTest, populatePermissionState_InvalidPermission) {
+    EXPECT_THAT(controller_.populatePermissionState(PermissionEnum::ENUM_SIZE, {}),
+                BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT));
+    EXPECT_THAT(
+            controller_.populatePermissionState(
+                    static_cast<PermissionEnum>(static_cast<int>(PermissionEnum::ENUM_SIZE) + 1),
+                    {}),
+            BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT));
+}
+
+TEST_F(NativePermissionControllerTest, populatePermissionState_HoldsPermission) {
+    // Unsorted
+    std::vector<int> uids{3, 1, 2, 4, 5};
+
+    EXPECT_THAT(controller_.populatePermissionState(PermissionEnum::MODIFY_AUDIO_ROUTING, uids),
+                BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.checkPermission(PermissionEnum::MODIFY_AUDIO_ROUTING, 3),
+                IsOkAnd(IsTrue()));
+}
+
+TEST_F(NativePermissionControllerTest, populatePermissionState_DoesNotHoldPermission) {
+    // Unsorted
+    std::vector<int> uids{3, 1, 2, 4, 5};
+
+    EXPECT_THAT(controller_.populatePermissionState(PermissionEnum::MODIFY_AUDIO_ROUTING, uids),
+                BinderStatusMatcher::isOk());
+
+    EXPECT_THAT(controller_.checkPermission(PermissionEnum::MODIFY_AUDIO_ROUTING, 6),
+                IsOkAnd(IsFalse()));
+}
+
+TEST_F(NativePermissionControllerTest, populatePermissionState_NotInitialized) {
+    EXPECT_THAT(controller_.checkPermission(PermissionEnum::MODIFY_AUDIO_ROUTING, 3),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+}
diff --git a/services/audiopolicy/permission/tests/ValidatedAttributionSourceStateTest.cpp b/services/audiopolicy/permission/tests/ValidatedAttributionSourceStateTest.cpp
new file mode 100644
index 0000000..0dd8814
--- /dev/null
+++ b/services/audiopolicy/permission/tests/ValidatedAttributionSourceStateTest.cpp
@@ -0,0 +1,125 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <media/ValidatedAttributionSourceState.h>
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <android-base/expected.h>
+#include <error/ExpectedMatchers.h>
+#include <media/IPermissionProvider.h>
+#include "error/BinderStatusMatcher.h"
+
+using ::android::base::unexpected;
+using ::android::binder::Status;
+using ::android::binder::Status::EX_ILLEGAL_ARGUMENT;
+using ::android::binder::Status::EX_ILLEGAL_STATE;
+using ::android::binder::Status::EX_SECURITY;
+using ::android::content::AttributionSourceState;
+using ::android::error::BinderResult;
+using ::android::error::BinderStatusMatcher;
+using ::android::error::IsErrorAnd;
+using ::android::error::IsOkAnd;
+using ::com::android::media::permission::IPermissionProvider;
+using ::com::android::media::permission::PermissionEnum;
+using ::com::android::media::permission::ValidatedAttributionSourceState;
+
+using ::testing::Eq;
+using ::testing::Return;
+
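+// gmock implementation of IPermissionProvider used to drive the package and permission lookups
+// exercised by ValidatedAttributionSourceState in these tests.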
+class MockPermissionProvider : public IPermissionProvider {
+  public:
+    MOCK_METHOD(BinderResult<std::vector<std::string>>, getPackagesForUid, (uid_t uid),
+                (override, const));
+    MOCK_METHOD(BinderResult<bool>, validateUidPackagePair, (uid_t uid, const std::string&),
+                (override, const));
+    MOCK_METHOD(BinderResult<bool>, checkPermission, (PermissionEnum perm, uid_t),
+                (override, const));
+};
+
+class ValidatedAttributionSourceStateTest : public ::testing::Test {
+  protected:
+    MockPermissionProvider mMockProvider;
+    const uid_t mUid = 10001;
+    const std::vector<std::string> mPackageList{"com.package1", "com.package2"};
+};
+
+TEST_F(ValidatedAttributionSourceStateTest, providedPackageValid) {
+    const std::string package = "com.package1";
+    EXPECT_CALL(mMockProvider, validateUidPackagePair(mUid, package)).WillOnce(Return(true));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = package;
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsOkAnd(Eq(attr)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, providedPackageInvalid) {
+    const std::string package = "com.package.spoof";
+    EXPECT_CALL(mMockProvider, validateUidPackagePair(mUid, package)).WillOnce(Return(false));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = package;
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_SECURITY)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, packageLookup_whenMissingPackage) {
+    EXPECT_CALL(mMockProvider, getPackagesForUid(mUid)).WillOnce(Return(mPackageList));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    AttributionSourceState expectedAttr;
+    expectedAttr.uid = mUid;
+    expectedAttr.packageName = "com.package1";
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsOkAnd(Eq(expectedAttr)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, packageLookup_whenEmptyPackage) {
+    EXPECT_CALL(mMockProvider, getPackagesForUid(mUid)).WillOnce(Return(mPackageList));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = std::string{};
+    AttributionSourceState expectedAttr;
+    expectedAttr.uid = mUid;
+    expectedAttr.packageName = "com.package1";
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsOkAnd(Eq(expectedAttr)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, controllerNotInitialized) {
+    EXPECT_CALL(mMockProvider, getPackagesForUid(mUid))
+            .WillOnce(Return(unexpected{Status::fromExceptionCode(EX_ILLEGAL_STATE)}));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = std::string{};
+    AttributionSourceState expectedAttr;
+    expectedAttr.uid = mUid;
+    expectedAttr.packageName = "com.package1";
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_STATE)));
+}
+
+TEST_F(ValidatedAttributionSourceStateTest, uidNotFound) {
+    EXPECT_CALL(mMockProvider, getPackagesForUid(mUid))
+            .WillOnce(Return(unexpected{Status::fromExceptionCode(EX_ILLEGAL_ARGUMENT)}));
+    AttributionSourceState attr;
+    attr.uid = mUid;
+    attr.packageName = std::string{};
+    EXPECT_THAT(ValidatedAttributionSourceState::createFromTrustedUidNoPackage(attr, mMockProvider),
+                IsErrorAnd(BinderStatusMatcher::hasException(EX_ILLEGAL_ARGUMENT)));
+}
diff --git a/services/audiopolicy/service/Android.bp b/services/audiopolicy/service/Android.bp
index 9b7a470..e157808 100644
--- a/services/audiopolicy/service/Android.bp
+++ b/services/audiopolicy/service/Android.bp
@@ -11,10 +11,16 @@
 cc_defaults {
     name: "libaudiopolicyservice_dependencies",
 
+    include_dirs: [
+        "frameworks/av/services/audiopolicy", // include path outside of libaudiopolicyservice
+    ],
+
     shared_libs: [
         "android.media.audiopolicy-aconfig-cc",
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
+        "audiopermissioncontroller",
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "capture_state_listener-aidl-cpp",
@@ -33,6 +39,7 @@
         "libaudioutils",
         "libbinder",
         "libcutils",
+        "libeffectsconfig",
         "libhardware_legacy",
         "libheadtracking",
         "libheadtracking-binding",
@@ -50,11 +57,6 @@
         "packagemanager_aidl-cpp",
         "spatializer-aidl-cpp",
     ],
-
-    static_libs: [
-        "libaudiopolicycomponents",
-        "libeffectsconfig",
-    ],
 }
 
 cc_library {
@@ -80,11 +82,8 @@
         "frameworks/av/services/audioflinger",
     ],
 
-    static_libs: [
-        "framework-permission-aidl-cpp",
-    ],
-
     header_libs: [
+        "audiopolicyservicelocal_headers",
         "libaudiohal_headers",
         "libaudiopolicycommon",
         "libaudiopolicyengine_interface_headers",
@@ -92,25 +91,29 @@
         "libaudioutils_headers",
     ],
 
+    export_include_dirs: ["."],
+
     cflags: [
         "-Wall",
         "-Werror",
         "-Wthread-safety",
         "-fvisibility=hidden",
     ],
-
-    export_shared_lib_headers: [
-        "framework-permission-aidl-cpp",
-        "libactivitymanager_aidl",
-        "libaudiousecasevalidation",
-        "libheadtracking",
-        "libheadtracking-binding",
-        "libsensorprivacy",
-    ],
 }
 
 cc_library_headers {
     name: "libaudiopolicyservice_headers",
     host_supported: true,
-    export_include_dirs: ["."],
+    export_include_dirs: [
+        ".",
+        "include",
+    ],
+}
+
+cc_library_headers {
+    name: "audiopolicyservicelocal_headers",
+    host_supported: true,
+    export_include_dirs: ["include"],
+    header_libs: ["audiopermissioncontroller_headers"],
+    export_header_lib_headers: ["audiopermissioncontroller_headers"],
 }
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index 6de71a3..363dfa7 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -56,7 +56,8 @@
                                                            audio_config_base_t *mixerConfig,
                                                            const sp<DeviceDescriptorBase>& device,
                                                            uint32_t *latencyMs,
-                                                           audio_output_flags_t flags)
+                                                           audio_output_flags_t *flags,
+                                                           audio_attributes_t attributes)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af == 0) {
@@ -73,7 +74,9 @@
     request.mixerConfig = VALUE_OR_RETURN_STATUS(
             legacy2aidl_audio_config_base_t_AudioConfigBase(*mixerConfig, false /*isInput*/));
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
-    request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(*flags));
+    request.attributes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(attributes));
 
     status_t status = af->openOutput(request, &response);
     if (status == OK) {
@@ -86,7 +89,9 @@
             .channel_mask = halConfig->channel_mask,
             .format = halConfig->format,
         };
-        mAudioPolicyService->registerOutput(*output, config, flags);
+        *flags = VALUE_OR_RETURN_STATUS(
+                aidl2legacy_int32_t_audio_output_flags_t_mask(response.flags));
+        mAudioPolicyService->registerOutput(*output, config, *flags);
     }
     return status;
 }
@@ -188,6 +193,16 @@
                                                delay_ms);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setPortsVolume(
+        const std::vector<audio_port_handle_t> &ports, float volume, audio_io_handle_t output,
+        int delayMs)
+{
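+    // Forward the per-port volume request to AudioPolicyService; an empty port list is a no-op.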
+    if (ports.empty()) {
+        return NO_ERROR;
+    }
+    return mAudioPolicyService->setPortsVolume(ports, volume, output, delayMs);
+}
+
 void AudioPolicyService::AudioPolicyClient::setParameters(audio_io_handle_t io_handle,
                    const String8& keyValuePairs,
                    int delay_ms)
@@ -352,4 +367,13 @@
     return af->getAudioMixPort(devicePort, port);
 }
 
+status_t AudioPolicyService::AudioPolicyClient::setTracksInternalMute(
+        const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) {
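+    // Forward the internal mute state for the given tracks to AudioFlinger.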
+    sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
+    if (af == nullptr) {
+        return PERMISSION_DENIED;
+    }
+    return af->setTracksInternalMute(tracksInternalMute);
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index a862037..24ab6a1 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -14,20 +14,23 @@
  * limitations under the License.
  */
 
-#define LOG_TAG "AudioPolicyIntefaceImpl"
+#define LOG_TAG "AudioPolicyInterfaceImpl"
 //#define LOG_NDEBUG 0
 
 #include "AudioPolicyService.h"
 #include "AudioRecordClient.h"
 #include "TypeConverter.h"
+
+#include <android/content/AttributionSourceState.h>
 #include <android_media_audiopolicy.h>
+#include <com_android_media_audio.h>
+#include <error/expected_utils.h>
 #include <media/AidlConversion.h>
 #include <media/AudioPolicy.h>
 #include <media/AudioValidator.h>
 #include <media/MediaMetricsItem.h>
 #include <media/PolicyAidlConversion.h>
 #include <utils/Log.h>
-#include <android/content/AttributionSourceState.h>
 
 #define VALUE_OR_RETURN_BINDER_STATUS(x) \
     ({ auto _tmp = (x); \
@@ -43,12 +46,30 @@
         if (!_tmp.isOk()) return _tmp; \
     }
 
+#define CHECK_PERM(expr1, expr2) \
+    VALUE_OR_RETURN_STATUS(getPermissionProvider().checkPermission((expr1), (expr2)))
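+// CHECK_PERM evaluates to the boolean result of the permission query for (permission, uid);
+// a binder error from the provider is propagated out of the enclosing method.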
+
 #define MAX_ITEMS_PER_LIST 1024
 
 namespace android {
 namespace audiopolicy_flags = android::media::audiopolicy;
 using binder::Status;
 using aidl_utils::binderStatusFromStatusT;
+using com::android::media::audio::audioserver_permissions;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::PermissionEnum::ACCESS_ULTRASOUND;
+using com::android::media::permission::PermissionEnum::CALL_AUDIO_INTERCEPTION;
+using com::android::media::permission::PermissionEnum::CAPTURE_AUDIO_HOTWORD;
+using com::android::media::permission::PermissionEnum::CAPTURE_AUDIO_OUTPUT;
+using com::android::media::permission::PermissionEnum::CAPTURE_MEDIA_OUTPUT;
+using com::android::media::permission::PermissionEnum::CAPTURE_TUNER_AUDIO_INPUT;
+using com::android::media::permission::PermissionEnum::CAPTURE_VOICE_COMMUNICATION_OUTPUT;
+using com::android::media::permission::PermissionEnum::MODIFY_AUDIO_ROUTING;
+using com::android::media::permission::PermissionEnum::MODIFY_AUDIO_SETTINGS;
+using com::android::media::permission::PermissionEnum::MODIFY_DEFAULT_AUDIO_EFFECTS;
+using com::android::media::permission::PermissionEnum::MODIFY_PHONE_STATE;
+using com::android::media::permission::PermissionEnum::RECORD_AUDIO;
+using com::android::media::permission::PermissionEnum::WRITE_SECURE_SETTINGS;
 using content::AttributionSourceState;
 using media::audio::common::AudioConfig;
 using media::audio::common::AudioConfigBase;
@@ -58,6 +79,8 @@
 using media::audio::common::AudioFormatDescription;
 using media::audio::common::AudioMode;
 using media::audio::common::AudioOffloadInfo;
+using media::audio::common::AudioPolicyForceUse;
+using media::audio::common::AudioPolicyForcedConfig;
 using media::audio::common::AudioSource;
 using media::audio::common::AudioStreamType;
 using media::audio::common::AudioUsage;
@@ -65,6 +88,10 @@
 using media::audio::common::Int;
 
 constexpr int kDefaultVirtualDeviceId = 0;
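+// Local aliases for the PermissionChecker results used below.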
+namespace {
+constexpr auto PERMISSION_HARD_DENIED = permission::PermissionChecker::PERMISSION_HARD_DENIED;
+constexpr auto PERMISSION_GRANTED = permission::PermissionChecker::PERMISSION_GRANTED;
+}
 
 const std::vector<audio_usage_t>& SYSTEM_USAGES = {
     AUDIO_USAGE_CALL_ASSISTANT,
@@ -84,31 +111,37 @@
         != std::end(mSupportedSystemUsages);
 }
 
-status_t AudioPolicyService::validateUsage(const audio_attributes_t& attr) {
+Status AudioPolicyService::validateUsage(const audio_attributes_t& attr) {
      return validateUsage(attr, getCallingAttributionSource());
 }
 
-status_t AudioPolicyService::validateUsage(const audio_attributes_t& attr,
+Status AudioPolicyService::validateUsage(const audio_attributes_t& attr,
         const AttributionSourceState& attributionSource) {
     if (isSystemUsage(attr.usage)) {
         if (isSupportedSystemUsage(attr.usage)) {
             if (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
                     && ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)) {
-                if (!callAudioInterceptionAllowed(attributionSource)) {
+                if (!(audioserver_permissions() ?
+                            CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
+                            : callAudioInterceptionAllowed(attributionSource))) {
                     ALOGE("%s: call audio interception not allowed for attribution source: %s",
                            __func__, attributionSource.toString().c_str());
-                    return PERMISSION_DENIED;
+                    return Status::fromExceptionCode(Status::EX_SECURITY,
+                            "Call audio interception not allowed");
                 }
-            } else if (!modifyAudioRoutingAllowed(attributionSource)) {
+            } else if (!(audioserver_permissions() ?
+                        CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid)
+                        : modifyAudioRoutingAllowed(attributionSource))) {
                 ALOGE("%s: modify audio routing not allowed for attribution source: %s",
                         __func__, attributionSource.toString().c_str());
-                return PERMISSION_DENIED;
+                return Status::fromExceptionCode(Status::EX_SECURITY,
+                        "Modify audio routing not allowed");
             }
         } else {
-            return BAD_VALUE;
+            return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT);
         }
     }
-    return NO_ERROR;
+    return Status::ok();
 }
 
 
@@ -135,7 +168,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE &&
@@ -189,7 +224,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -213,7 +250,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(state) >= AUDIO_MODE_CNT) {
@@ -251,8 +290,8 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::setForceUse(media::AudioPolicyForceUse usageAidl,
-                                       media::AudioPolicyForcedConfig configAidl)
+Status AudioPolicyService::setForceUse(AudioPolicyForceUse usageAidl,
+                                       AudioPolicyForcedConfig configAidl)
 {
     audio_policy_force_use_t usage = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(usageAidl));
@@ -263,7 +302,9 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    if (!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+            : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -281,8 +322,8 @@
     return Status::ok();
 }
 
-Status AudioPolicyService::getForceUse(media::AudioPolicyForceUse usageAidl,
-                                       media::AudioPolicyForcedConfig* _aidl_return) {
+Status AudioPolicyService::getForceUse(AudioPolicyForceUse usageAidl,
+                                       AudioPolicyForcedConfig* _aidl_return) {
     audio_policy_force_use_t usage = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_AudioPolicyForceUse_audio_policy_force_use_t(usageAidl));
 
@@ -353,7 +394,7 @@
 
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(AudioValidator::validateAudioAttributes(attr, "68953950")));
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr, attributionSource)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attr, attributionSource));
 
     ALOGV("%s()", __func__);
     audio_utils::lock_guard _l(mMutex);
@@ -362,14 +403,22 @@
         aidl2legacy_int32_t_uid_t(attributionSource.uid)))) {
         attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
     }
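+    // When the audioserver_permissions() flag is enabled, any of MODIFY_AUDIO_ROUTING,
+    // MODIFY_PHONE_STATE or WRITE_SECURE_SETTINGS allows bypassing the interruption policy.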
+    const bool bypassInterruptionAllowed = audioserver_permissions() ? (
+            CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid) ||
+            CHECK_PERM(MODIFY_PHONE_STATE, attributionSource.uid) ||
+            CHECK_PERM(WRITE_SECURE_SETTINGS, attributionSource.uid))
+            : bypassInterruptionPolicyAllowed(attributionSource);
+
     if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
-            && !bypassInterruptionPolicyAllowed(attributionSource)) {
+            && !bypassInterruptionAllowed) {
         attr.flags = static_cast<audio_flags_mask_t>(
                 attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
 
     if (attr.content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(attributionSource)) {
+        if (!(audioserver_permissions() ?
+                CHECK_PERM(ACCESS_ULTRASOUND, attributionSource.uid)
+                : accessUltrasoundAllowed(attributionSource))) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
                     __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -380,6 +429,7 @@
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized = false;
     bool isBitPerfect = false;
+    float volume;
     status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
                                                             &stream,
                                                             attributionSource,
@@ -388,7 +438,8 @@
                                                             &secondaryOutputs,
                                                             &outputType,
                                                             &isSpatialized,
-                                                            &isBitPerfect);
+                                                            &isBitPerfect,
+                                                            &volume);
 
     // FIXME: Introduce a way to check for the telephony device before opening the output
     if (result == NO_ERROR) {
@@ -398,18 +449,24 @@
             break;
         case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
             if (((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)
-                && !callAudioInterceptionAllowed(attributionSource)) {
+                && !(audioserver_permissions() ?
+                        CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
+                : callAudioInterceptionAllowed(attributionSource))) {
                 ALOGE("%s() permission denied: call redirection not allowed for uid %d",
                     __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
-            } else if (!modifyPhoneStateAllowed(attributionSource)) {
+            } else if (!(audioserver_permissions() ?
+                        CHECK_PERM(MODIFY_PHONE_STATE, attributionSource.uid)
+                    : modifyPhoneStateAllowed(attributionSource))) {
                 ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
                     __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
         case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
-            if (!modifyAudioRoutingAllowed(attributionSource)) {
+            if (!(audioserver_permissions() ?
+                        CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid)
+                    : modifyAudioRoutingAllowed(attributionSource))) {
                 ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
                     __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
@@ -428,7 +485,7 @@
 
         sp<AudioPlaybackClient> client =
                 new AudioPlaybackClient(attr, output, attributionSource, session,
-                    portId, selectedDeviceId, stream, isSpatialized);
+                    portId, selectedDeviceId, stream, isSpatialized, config.channel_mask);
         mAudioPlaybackClients.add(portId, client);
 
         _aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
@@ -446,6 +503,7 @@
         _aidl_return->isBitPerfect = isBitPerfect;
         _aidl_return->attr = VALUE_OR_RETURN_BINDER_STATUS(
                 legacy2aidl_audio_attributes_t_AudioAttributes(attr));
+        _aidl_return->volume = volume;
     } else {
         _aidl_return->configBase.format = VALUE_OR_RETURN_BINDER_STATUS(
                 legacy2aidl_audio_format_t_AudioFormatDescription(config.format));
@@ -628,8 +686,7 @@
         return binderStatusFromStatusT(BAD_VALUE);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
-            attributionSource)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attr, attributionSource));
 
     uint32_t virtualDeviceId = kDefaultVirtualDeviceId;
 
@@ -642,7 +699,10 @@
     // type is API_INPUT_MIX_EXT_POLICY_REROUTE and by AudioService if a media projection
     // is used and input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK
     // - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed()
-    if (!(recordingAllowed(attributionSource, inputSource)
+    const auto isRecordingAllowed = audioserver_permissions() ?
+            CHECK_PERM(RECORD_AUDIO, attributionSource.uid) :
+            recordingAllowed(attributionSource, inputSource);
+    if (!(isRecordingAllowed
             || inputSource == AUDIO_SOURCE_FM_TUNER
             || inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
             || inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
@@ -651,8 +711,12 @@
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureOutput = captureAudioOutputAllowed(attributionSource);
-    bool canInterceptCallAudio = callAudioInterceptionAllowed(attributionSource);
+    bool canCaptureOutput = audioserver_permissions() ?
+                        CHECK_PERM(CAPTURE_AUDIO_OUTPUT, attributionSource.uid)
+                        : captureAudioOutputAllowed(attributionSource);
+    bool canInterceptCallAudio = audioserver_permissions() ?
+                        CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
+                        : callAudioInterceptionAllowed(attributionSource);
     bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
              || inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
              || inputSource == AUDIO_SOURCE_VOICE_CALL;
@@ -666,11 +730,15 @@
     }
     if (inputSource == AUDIO_SOURCE_FM_TUNER
         && !canCaptureOutput
-        && !captureTunerAudioInputAllowed(attributionSource)) {
+        && !(audioserver_permissions() ?
+                        CHECK_PERM(CAPTURE_TUNER_AUDIO_INPUT, attributionSource.uid)
+            : captureTunerAudioInputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureHotword = captureHotwordAllowed(attributionSource);
+    bool canCaptureHotword = audioserver_permissions() ?
+                        CHECK_PERM(CAPTURE_AUDIO_HOTWORD, attributionSource.uid)
+                        : captureHotwordAllowed(attributionSource);
     if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -685,7 +753,9 @@
     }
 
     if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(attributionSource)) {
+        if (!(audioserver_permissions() ?
+                CHECK_PERM(ACCESS_ULTRASOUND, attributionSource.uid)
+                : accessUltrasoundAllowed(attributionSource))) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
                     __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -731,14 +801,29 @@
                     status = PERMISSION_DENIED;
                 }
                 break;
-            case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
-                if (!(modifyAudioRoutingAllowed(attributionSource)
+            case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE: {
+                bool modAudioRoutingAllowed;
+                if (audioserver_permissions()) {
+                    auto result = getPermissionProvider().checkPermission(
+                            MODIFY_AUDIO_ROUTING, attributionSource.uid);
+                    if (!result.ok()) {
+                        ALOGE("%s permission provider error: %s", __func__,
+                                result.error().toString8().c_str());
+                        status = aidl_utils::statusTFromBinderStatus(result.error());
+                        break;
+                    }
+                    modAudioRoutingAllowed = result.value();
+                } else {
+                    modAudioRoutingAllowed = modifyAudioRoutingAllowed(attributionSource);
+                }
+                if (!(modAudioRoutingAllowed
                         || ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
                             && canInterceptCallAudio))) {
                     ALOGE("%s permission denied for remote submix capture", __func__);
                     status = PERMISSION_DENIED;
                 }
                 break;
+            }
             case AudioPolicyInterface::API_INPUT_INVALID:
             default:
                 LOG_ALWAYS_FATAL("%s encountered an invalid input type %d",
@@ -821,13 +906,13 @@
 
     std::stringstream msg;
     msg << "Audio recording on session " << client->session;
+    const auto permitted = startRecording(client->attributionSource, client->virtualDeviceId,
+            String16(msg.str().c_str()), client->attributes.source);
 
     // check calling permissions
-    if (!(startRecording(client->attributionSource, client->virtualDeviceId,
-                         String16(msg.str().c_str()), client->attributes.source)
-            || client->attributes.source == AUDIO_SOURCE_FM_TUNER
-            || client->attributes.source == AUDIO_SOURCE_REMOTE_SUBMIX
-            || client->attributes.source == AUDIO_SOURCE_ECHO_REFERENCE)) {
+    if (permitted == PERMISSION_HARD_DENIED && client->attributes.source != AUDIO_SOURCE_FM_TUNER
+            && client->attributes.source != AUDIO_SOURCE_REMOTE_SUBMIX
+            && client->attributes.source != AUDIO_SOURCE_ECHO_REFERENCE) {
         ALOGE("%s permission denied: recording not allowed for attribution source %s",
                 __func__, client->attributionSource.toString().c_str());
         return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -847,13 +932,17 @@
         return binderStatusFromStatusT(INVALID_OPERATION);
     }
 
-    // Force the possibly silenced client to be unsilenced since we just called
-    // startRecording (i.e. we have assumed it is unsilenced).
-    // At this point in time, the client is inactive, so no calls to appops are sent in
-    // setAppState_l.
-    // This ensures existing clients have the same behavior as new clients (starting unsilenced).
+    // Force the possibly silenced client to match the state on the appops side
+    // following the call to startRecording (i.e. unsilenced iff the call succeeded).
+    // At this point in time, the client is inactive, so no calls to appops are
+    // sent in setAppState_l. This ensures existing clients have the same
+    // behavior as new clients.
     // TODO(b/282076713)
-    setAppState_l(client, APP_STATE_TOP);
+    if (permitted == PERMISSION_GRANTED) {
+        setAppState_l(client, APP_STATE_TOP);
+    } else {
+        setAppState_l(client, APP_STATE_IDLE);
+    }
 
     client->active = true;
     client->startTimeNs = systemTime();
@@ -939,8 +1028,10 @@
         client->active = false;
         client->startTimeNs = 0;
         updateUidStates_l();
-        finishRecording(client->attributionSource, client->virtualDeviceId,
-                        client->attributes.source);
+        if (!client->silenced) {
+            finishRecording(client->attributionSource, client->virtualDeviceId,
+                    client->attributes.source);
+        }
     }
 
     return binderStatusFromStatusT(status);
@@ -969,7 +1060,11 @@
     updateUidStates_l();
 
     // finish the recording app op
-    finishRecording(client->attributionSource, client->virtualDeviceId, client->attributes.source);
+    if (!client->silenced) {
+        finishRecording(client->attributionSource, client->virtualDeviceId,
+                client->attributes.source);
+    }
+
     AutoCallerClear acc;
     return binderStatusFromStatusT(mAudioPolicyManager->stopInput(portId));
 }
@@ -1020,6 +1115,34 @@
     return Status::ok();
 }
 
+Status AudioPolicyService::setDeviceAbsoluteVolumeEnabled(const AudioDevice& deviceAidl,
+                                                          bool enabled,
+                                                          AudioStreamType streamToDriveAbsAidl) {
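+    // Convert the AIDL arguments, then enable or disable absolute volume for the given device,
+    // with streamToDriveAbs selecting the stream that drives the absolute volume.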
+    audio_stream_type_t streamToDriveAbs = VALUE_OR_RETURN_BINDER_STATUS(
+            aidl2legacy_AudioStreamType_audio_stream_type_t(streamToDriveAbsAidl));
+    audio_devices_t deviceType;
+    std::string address;
+    RETURN_BINDER_STATUS_IF_ERROR(
+            aidl2legacy_AudioDevice_audio_device(deviceAidl, &deviceType, &address));
+
+    if (mAudioPolicyManager == nullptr) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
+        return binderStatusFromStatusT(PERMISSION_DENIED);
+    }
+    if (uint32_t(streamToDriveAbs) >= AUDIO_STREAM_PUBLIC_CNT) {
+        return binderStatusFromStatusT(BAD_VALUE);
+    }
+    audio_utils::lock_guard _l(mMutex);
+    AutoCallerClear acc;
+    return binderStatusFromStatusT(
+            mAudioPolicyManager->setDeviceAbsoluteVolumeEnabled(deviceType, address.c_str(),
+                                                                enabled, streamToDriveAbs));
+}
+
 Status AudioPolicyService::initStreamVolume(AudioStreamType streamAidl,
                                             int32_t indexMinAidl,
                                             int32_t indexMaxAidl) {
@@ -1031,7 +1154,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
@@ -1055,7 +1180,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
@@ -1105,7 +1232,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     audio_utils::lock_guard _l(mMutex);
@@ -1411,7 +1540,9 @@
 
     sp<AudioPolicyEffects>audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(audioPolicyEffects->addSourceDefaultEffect(
@@ -1437,7 +1568,9 @@
 
     sp<AudioPolicyEffects> audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(audioPolicyEffects->addStreamDefaultEffect(
@@ -1452,7 +1585,9 @@
             aidl2legacy_int32_t_audio_unique_id_t(idAidl));
     sp<AudioPolicyEffects>audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     return binderStatusFromStatusT(audioPolicyEffects->removeSourceDefaultEffect(id));
@@ -1464,7 +1599,9 @@
             aidl2legacy_int32_t_audio_unique_id_t(idAidl));
     sp<AudioPolicyEffects>audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     return binderStatusFromStatusT(audioPolicyEffects->removeStreamDefaultEffect(id));
@@ -1482,7 +1619,9 @@
                          std::back_inserter(systemUsages), aidl2legacy_AudioUsage_audio_usage_t)));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1541,26 +1680,13 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attributes));
 
     audio_utils::lock_guard _l(mMutex);
     *_aidl_return = mAudioPolicyManager->isDirectOutputSupported(config, attributes);
     return Status::ok();
 }
 
-template <typename Port>
-void anonymizePortBluetoothAddress(Port *port) {
-    if (port->type != AUDIO_PORT_TYPE_DEVICE) {
-        return;
-    }
-    if (!(audio_is_a2dp_device(port->ext.device.type)
-            || audio_is_ble_device(port->ext.device.type)
-            || audio_is_bluetooth_sco_device(port->ext.device.type)
-            || audio_is_hearing_aid_out_device(port->ext.device.type))) {
-        return;
-    }
-    anonymizeBluetoothAddress(port->ext.device.address);
-}
 
 Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
                                           media::AudioPortType typeAidl, Int* count,
@@ -1579,27 +1705,14 @@
     std::unique_ptr<audio_port_v7[]> ports(new audio_port_v7[num_ports]);
     unsigned int generation;
 
-    const AttributionSourceState attributionSource = getCallingAttributionSource();
+    audio_utils::lock_guard _l(mMutex);
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
     AutoCallerClear acc;
-    {
-        audio_utils::lock_guard _l(mMutex);
-        if (mAudioPolicyManager == NULL) {
-            return binderStatusFromStatusT(NO_INIT);
-        }
-        // AudioPolicyManager->listAudioPorts makes a deep copy of port structs into ports
-        // so it is safe to access after releasing the mutex
-        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-                mAudioPolicyManager->listAudioPorts(
-                        role, type, &num_ports, ports.get(), &generation)));
-        numPortsReq = std::min(numPortsReq, num_ports);
-    }
-
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
-        for (size_t i = 0; i < numPortsReq; ++i) {
-            anonymizePortBluetoothAddress(&ports[i]);
-        }
-    }
-
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->listAudioPorts(role, type, &num_ports, ports.get(), &generation)));
+    numPortsReq = std::min(numPortsReq, num_ports);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(ports.get(), ports.get() + numPortsReq, std::back_inserter(*portsAidl),
                          legacy2aidl_audio_port_v7_AudioPortFw)));
@@ -1622,24 +1735,12 @@
 Status AudioPolicyService::getAudioPort(int portId,
                                         media::AudioPortFw* _aidl_return) {
     audio_port_v7 port{ .id = portId };
-
-    const AttributionSourceState attributionSource = getCallingAttributionSource();
+    audio_utils::lock_guard _l(mMutex);
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
     AutoCallerClear acc;
-
-    {
-        audio_utils::lock_guard _l(mMutex);
-        if (mAudioPolicyManager == NULL) {
-            return binderStatusFromStatusT(NO_INIT);
-        }
-        // AudioPolicyManager->getAudioPort makes a deep copy of the port struct into port
-        // so it is safe to access after releasing the mutex
-        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
-    }
-
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
-        anonymizePortBluetoothAddress(&port);
-    }
-
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
     return Status::ok();
 }
@@ -1654,7 +1755,9 @@
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(AudioValidator::validateAudioPatch(patch)));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1673,7 +1776,9 @@
     audio_patch_handle_t handle = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_patch_handle_t(handleAidl));
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1697,32 +1802,14 @@
     std::unique_ptr<audio_patch[]> patches(new audio_patch[num_patches]);
     unsigned int generation;
 
-    const AttributionSourceState attributionSource = getCallingAttributionSource();
+    audio_utils::lock_guard _l(mMutex);
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
     AutoCallerClear acc;
-
-    {
-        audio_utils::lock_guard _l(mMutex);
-        if (mAudioPolicyManager == NULL) {
-            return binderStatusFromStatusT(NO_INIT);
-        }
-        // AudioPolicyManager->listAudioPatches makes a deep copy of patches structs into patches
-        // so it is safe to access after releasing the mutex
-        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-                mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
-        numPatchesReq = std::min(numPatchesReq, num_patches);
-    }
-
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
-        for (size_t i = 0; i < numPatchesReq; ++i) {
-            for (size_t j = 0; j < patches[i].num_sources; ++j) {
-                anonymizePortBluetoothAddress(&patches[i].sources[j]);
-            }
-            for (size_t j = 0; j < patches[i].num_sinks; ++j) {
-                anonymizePortBluetoothAddress(&patches[i].sinks[j]);
-            }
-        }
-    }
-
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
+    numPatchesReq = std::min(numPatchesReq, num_patches);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(patches.get(), patches.get() + numPatchesReq,
                          std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatchFw)));
@@ -1739,7 +1826,9 @@
             binderStatusFromStatusT(AudioValidator::validateAudioPortConfig(config)));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1802,7 +1891,9 @@
     // loopback|render only need a MediaProjection (checked in caller AudioService.java)
     bool needModifyAudioRouting = std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
             return !is_mix_loopback_render(mix.mRouteFlags); });
-    if (needModifyAudioRouting && !modifyAudioRoutingAllowed()) {
+    if (needModifyAudioRouting && !(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1818,12 +1909,16 @@
     const AttributionSourceState attributionSource = getCallingAttributionSource();
 
 
-    if (needCaptureMediaOutput && !captureMediaOutputAllowed(attributionSource)) {
+    if (needCaptureMediaOutput && !(audioserver_permissions() ?
+                CHECK_PERM(CAPTURE_MEDIA_OUTPUT, attributionSource.uid)
+                : captureMediaOutputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (needCaptureVoiceCommunicationOutput &&
-        !captureVoiceCommunicationOutputAllowed(attributionSource)) {
+        !(audioserver_permissions() ?
+                CHECK_PERM(CAPTURE_VOICE_COMMUNICATION_OUTPUT, attributionSource.uid)
+                : captureVoiceCommunicationOutputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1880,7 +1975,9 @@
                                                         aidl2legacy_AudioDeviceTypeAddress));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1894,7 +1991,9 @@
     uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1913,7 +2012,9 @@
                                                         aidl2legacy_AudioDeviceTypeAddress));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1927,7 +2028,9 @@
     int userId = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(userIdAidl));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+            : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1955,7 +2058,7 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attributes));
 
     // startAudioSource should be created as the calling uid
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -1984,7 +2087,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     audio_utils::lock_guard _l(mMutex);
@@ -2643,4 +2748,9 @@
             mAudioPolicyManager->clearPreferredMixerAttributes(&attr, portId, uid));
 }
 
+Status AudioPolicyService::getPermissionController(sp<INativePermissionController>* out) {
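+    // Expose the controller that system_server uses to push package and permission state
+    // down to audioserver (see INativePermissionController).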
+    *out = mPermissionController;
+    return Status::ok();
+}
+
 } // namespace android
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 3e1245b..7b7275e 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -61,6 +61,10 @@
 
 static const String16 sManageAudioPolicyPermission("android.permission.MANAGE_AUDIO_POLICY");
 
+namespace {
+constexpr auto PERMISSION_GRANTED = permission::PermissionChecker::PERMISSION_GRANTED;
+}
+
 // Creates an association between Binder code to name for IAudioPolicyService.
 #define IAUDIOPOLICYSERVICE_BINDER_METHOD_MACRO_LIST \
 BINDER_METHOD_ENTRY(onNewAudioModulesAvailable) \
@@ -79,6 +83,7 @@
 BINDER_METHOD_ENTRY(startInput) \
 BINDER_METHOD_ENTRY(stopInput) \
 BINDER_METHOD_ENTRY(releaseInput) \
+BINDER_METHOD_ENTRY(setDeviceAbsoluteVolumeEnabled) \
 BINDER_METHOD_ENTRY(initStreamVolume) \
 BINDER_METHOD_ENTRY(setStreamVolumeIndex) \
 BINDER_METHOD_ENTRY(getStreamVolumeIndex) \
@@ -164,6 +169,7 @@
 BINDER_METHOD_ENTRY(getPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(clearPreferredMixerAttributes) \
 BINDER_METHOD_ENTRY(getRegisteredPolicyMixes) \
+BINDER_METHOD_ENTRY(getPermissionController) \
                                                      \
 // singleton for Binder Method Statistics for IAudioPolicyService
 static auto& getIAudioPolicyServiceStatistics() {
@@ -191,9 +197,7 @@
     media::AudioPolicyConfig apmConfig;
     if (status_t status = clientInterface->getAudioPolicyConfig(&apmConfig); status == OK) {
         auto config = AudioPolicyConfig::loadFromApmAidlConfigWithFallback(apmConfig);
-        LOG_ALWAYS_FATAL_IF(config->getEngineLibraryNameSuffix() !=
-                AudioPolicyConfig::kDefaultEngineLibraryNameSuffix,
-                "Only default engine is currently supported with the AIDL HAL");
+        ALOGD("%s loading APM engine %s", __func__, config->getEngineLibraryNameSuffix().c_str());
         apm = new AudioPolicyManager(config,
                 loadApmEngineLibraryAndCreateEngine(
                         config->getEngineLibraryNameSuffix(), apmConfig.engineConfig),
@@ -226,7 +230,9 @@
       mCaptureStateNotifier(false),
       mCreateAudioPolicyManager(createAudioPolicyManager),
       mDestroyAudioPolicyManager(destroyAudioPolicyManager),
-      mUsecaseValidator(media::createUsecaseValidator()) {
+      mUsecaseValidator(media::createUsecaseValidator()),
+      mPermissionController(sp<NativePermissionController>::make())
+{
       setMinSchedulerPolicy(SCHED_NORMAL, ANDROID_PRIORITY_AUDIO);
       setInheritRt(true);
 }
@@ -313,6 +319,10 @@
     AudioSystem::audioPolicyReady();
 }
 
+const IPermissionProvider& AudioPolicyService::getPermissionProvider() const {
+    return *mPermissionController;
+}
+
 void AudioPolicyService::onAudioSystemReady() {
     sp<AudioPolicyEffects> audioPolicyEffects;
     {
@@ -582,12 +592,13 @@
             if (status == NO_ERROR && currentOutput == newOutput) {
                 return;
             }
-            size_t numActiveTracks = countActiveClientsOnOutput_l(newOutput);
+            std::vector<audio_channel_mask_t> activeTracksMasks =
+                    getActiveTracksMasks_l(newOutput);
             mMutex.unlock();
             // It is OK to call detachOutput() if none is already attached.
             mSpatializer->detachOutput();
             if (status == NO_ERROR && newOutput != AUDIO_IO_HANDLE_NONE) {
-                status = mSpatializer->attachOutput(newOutput, numActiveTracks);
+                status = mSpatializer->attachOutput(newOutput, activeTracksMasks);
             }
             mMutex.lock();
             if (status != NO_ERROR) {
@@ -605,17 +616,17 @@
     }
 }
 
-size_t AudioPolicyService::countActiveClientsOnOutput_l(
+std::vector<audio_channel_mask_t> AudioPolicyService::getActiveTracksMasks_l(
         audio_io_handle_t output, bool spatializedOnly) {
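+    // Collect the channel mask of each active client on |output|, optionally spatialized only.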
-    size_t count = 0;
+    std::vector<audio_channel_mask_t> activeTrackMasks;
     for (size_t i = 0; i < mAudioPlaybackClients.size(); i++) {
         auto client = mAudioPlaybackClients.valueAt(i);
         if (client->io == output && client->active
                 && (!spatializedOnly || client->isSpatialized)) {
-            count++;
+            activeTrackMasks.push_back(client->channelMask);
         }
     }
-    return count;
+    return activeTrackMasks;
 }
 
 void AudioPolicyService::onUpdateActiveSpatializerTracks_l() {
@@ -631,12 +642,12 @@
         return;
     }
     audio_io_handle_t output = mSpatializer->getOutput();
-    size_t activeClients;
+    std::vector<audio_channel_mask_t> activeTracksMasks;
     {
         audio_utils::lock_guard _l(mMutex);
-        activeClients = countActiveClientsOnOutput_l(output);
+        activeTracksMasks = getActiveTracksMasks_l(output);
     }
-    mSpatializer->updateActiveTracks(activeClients);
+    mSpatializer->updateActiveTracks(activeTracksMasks);
 }
 
 status_t AudioPolicyService::clientCreateAudioPatch(const struct audio_patch *patch,
@@ -865,6 +876,8 @@
 //            OR client has CAPTURE_AUDIO_OUTPUT privileged permission
 //    OR the client is the current InputMethodService
 //        AND a RTT call is active AND the source is VOICE_RECOGNITION
+//    OR The client is an active communication owner
+//        AND is on TOP or latest started
 //    OR Any client
 //        AND The assistant is not on TOP
 //        AND is on TOP or latest started
@@ -1029,7 +1042,12 @@
         bool isTopOrLatestAssistant = latestActiveAssistant == nullptr ? false :
             current->attributionSource.uid == latestActiveAssistant->attributionSource.uid;
 
-        auto canCaptureIfInCallOrCommunication = [&](const auto &recordClient) REQUIRES(mMutex) {
+        // TODO: b/339112720
+        // Refine this logic when we have the correct phone state owner UID. The current issue is
+        // that when a VoIP app uses the Telecom API to manage calls, mPhoneStateOwnerUid is
+        // AID_SYSTEM instead of the actual VoIP app UID, so isPhoneStateOwnerActive is not
+        // accurate here.
+        const bool canCaptureIfInCallOrCommunication = [&](const auto& recordClient) REQUIRES(
+                                                               mMutex) {
             uid_t recordUid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(
                 recordClient->attributionSource.uid));
             bool canCaptureCall = recordClient->canCaptureOutput;
@@ -1038,19 +1056,26 @@
                 || recordUid == mPhoneStateOwnerUid;
             return !(isInCall && !canCaptureCall)
                 && !(isInCommunication && !canCaptureCommunication);
-        };
+        }(current);
 
         // By default allow capture if:
         //     The assistant is not on TOP
-        //     AND is on TOP or latest started
-        //     AND there is no active privacy sensitive capture or call
+        //         AND is on TOP or latest started
+        //         AND there is no active privacy sensitive capture or call
         //             OR client has CAPTURE_AUDIO_OUTPUT privileged permission
-        bool allowSensitiveCapture =
+        //     OR the assistant is on TOP
+        //         AND is the ongoing communication owner
+        //         AND is on TOP or latest started
+        const bool allowSensitiveCapture =
             !isSensitiveActive || isTopOrLatestSensitive || current->canCaptureOutput;
-        bool allowCapture = !isAssistantOnTop
-                && (isTopOrLatestActive || isTopOrLatestSensitive)
-                && allowSensitiveCapture
-                && canCaptureIfInCallOrCommunication(current);
+        bool allowCapture = false;
+        if (!isAssistantOnTop) {
+            allowCapture = (isTopOrLatestActive || isTopOrLatestSensitive) &&
+                           allowSensitiveCapture && canCaptureIfInCallOrCommunication;
+        } else {
+            allowCapture = isInCommunication && isTopOrLatestSensitive &&
+                           canCaptureIfInCallOrCommunication;
+        }
 
         if (!current->hasOp()) {
             // Never allow capture if app op is denied
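The branching introduced above can be read as a single predicate. A minimal restatement as a free function (hypothetical name computeDefaultAllowCapture; the parameters mirror the flags computed above, and this is a sketch rather than code from the patch):

bool computeDefaultAllowCapture(bool isAssistantOnTop, bool isTopOrLatestActive,
                                bool isTopOrLatestSensitive, bool allowSensitiveCapture,
                                bool isInCommunication,
                                bool canCaptureIfInCallOrCommunication) {
    if (!isAssistantOnTop) {
        // Pre-existing rule: a top or latest-started client may capture when no active
        // privacy-sensitive capture or call blocks it.
        return (isTopOrLatestActive || isTopOrLatestSensitive)
                && allowSensitiveCapture && canCaptureIfInCallOrCommunication;
    }
    // New rule: with the assistant on top, only the ongoing communication owner that is
    // top or latest-started may capture.
    return isInCommunication && isTopOrLatestSensitive && canCaptureIfInCallOrCommunication;
}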
@@ -1073,7 +1098,7 @@
                     allowCapture = true;
                 }
             } else if (allowSensitiveCapture
-                    && canCaptureIfInCallOrCommunication(current)) {
+                    && canCaptureIfInCallOrCommunication) {
                 if (isTopOrLatestAssistant
                     && (source == AUDIO_SOURCE_VOICE_RECOGNITION
                         || source == AUDIO_SOURCE_HOTWORD)) {
@@ -1094,7 +1119,7 @@
                     allowCapture = true;
                 }
             } else if (allowSensitiveCapture
-                        && canCaptureIfInCallOrCommunication(current)) {
+                        && canCaptureIfInCallOrCommunication) {
                 if ((source == AUDIO_SOURCE_VOICE_RECOGNITION) || (source == AUDIO_SOURCE_HOTWORD))
                 {
                     allowCapture = true;
@@ -1109,7 +1134,7 @@
             //         Is on TOP AND the source is VOICE_RECOGNITION or HOTWORD
             if (!isAssistantOnTop
                     && allowSensitiveCapture
-                    && canCaptureIfInCallOrCommunication(current)) {
+                    && canCaptureIfInCallOrCommunication) {
                 allowCapture = true;
             }
             if (isA11yOnTop) {
@@ -1123,7 +1148,7 @@
             //     AND no call is active
             //         OR client has CAPTURE_AUDIO_OUTPUT privileged permission
             if (onlyHotwordActive
-                    && canCaptureIfInCallOrCommunication(current)) {
+                    && canCaptureIfInCallOrCommunication) {
                 allowCapture = true;
             }
         } else if (mUidPolicy->isCurrentImeUid(currentUid)) {
@@ -1195,9 +1220,10 @@
                 } else {
                     std::stringstream msg;
                     msg << "Audio recording un-silenced on session " << client->session;
-                    if (!startRecording(client->attributionSource, client->virtualDeviceId,
-                                        String16(msg.str().c_str()), client->attributes.source)) {
-                        silenced = true;
+                    if (startRecording(client->attributionSource, client->virtualDeviceId,
+                                String16(msg.str().c_str()), client->attributes.source)
+                                != PERMISSION_GRANTED) {
+                        return;
                     }
                 }
             }
@@ -1307,6 +1333,7 @@
         case TRANSACTION_setPhoneState:
 //FIXME: Allow setForceUse calls from system apps until a better use case routing API is available
 //      case TRANSACTION_setForceUse:
+        case TRANSACTION_setDeviceAbsoluteVolumeEnabled:
         case TRANSACTION_initStreamVolume:
         case TRANSACTION_setStreamVolumeIndex:
         case TRANSACTION_setVolumeIndexForAttributes:
@@ -1368,6 +1395,17 @@
             break;
     }
 
+    switch (code) {
+        case TRANSACTION_getPermissionController: {
+            if (!isAudioServerOrSystemServerUid(IPCThreadState::self()->getCallingUid())) {
+                ALOGW("%s: transaction %d received from PID %d unauthorized UID %d",
+                      __func__, code, IPCThreadState::self()->getCallingPid(),
+                      IPCThreadState::self()->getCallingUid());
+                return INVALID_OPERATION;
+            }
+        }
+    }
+
     const std::string methodName = getIAudioPolicyServiceStatistics().getMethodForCode(code);
     mediautils::TimeCheck check(
             std::string("IAudioPolicyService::").append(methodName),
@@ -1381,9 +1419,9 @@
         } else {
             getIAudioPolicyServiceStatistics().event(code, elapsedMs);
         }
-    }, mediautils::TimeCheck::kDefaultTimeoutDuration,
-    mediautils::TimeCheck::kDefaultSecondChanceDuration,
-    true /* crashOnTimeout */);
+    }, mediautils::TimeCheck::getDefaultTimeoutDuration(),
+    mediautils::TimeCheck::getDefaultSecondChanceDuration(),
+    !property_get_bool("audio.timecheck.disabled", false) /* crashOnTimeout */);
 
     switch (code) {
         case SHELL_COMMAND_TRANSACTION: {
@@ -1769,6 +1807,7 @@
                   ++numTimesBecameEmpty;
                 }
                 mLastCommand = command;
+                status_t createAudioPatchStatus;
 
                 switch (command->mCommand) {
                 case SET_VOLUME: {
@@ -1781,6 +1820,16 @@
                                                                     data->mIO);
                     ul.lock();
                     }break;
+                case SET_PORTS_VOLUME: {
+                    VolumePortsData *data = (VolumePortsData *)command->mParam.get();
+                    ALOGV("AudioCommandThread() processing set volume Ports %s volume %f, \
+                            output %d", data->dumpPorts().c_str(), data->mVolume, data->mIO);
+                    ul.unlock();
+                    command->mStatus = AudioSystem::setPortsVolume(data->mPorts,
+                                                                   data->mVolume,
+                                                                   data->mIO);
+                    ul.lock();
+                    } break;
                 case SET_PARAMETERS: {
                     ParametersData *data = (ParametersData *)command->mParam.get();
                     ALOGV("AudioCommandThread() processing set parameters string %s, io %d",
@@ -1826,10 +1875,11 @@
                     ALOGV("AudioCommandThread() processing create audio patch");
                     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
                     if (af == 0) {
-                        command->mStatus = PERMISSION_DENIED;
+                        createAudioPatchStatus = PERMISSION_DENIED;
                     } else {
                         ul.unlock();
-                        command->mStatus = af->createAudioPatch(&data->mPatch, &data->mHandle);
+                        createAudioPatchStatus = af->createAudioPatch(&data->mPatch,
+                                                                      &data->mHandle);
                         ul.lock();
                     }
                     } break;
@@ -1998,8 +2048,28 @@
                 {
                     audio_utils::lock_guard _l(command->mMutex);
                     if (command->mWaitStatus) {
+                        if (command->mCommand == CREATE_AUDIO_PATCH) {
+                            command->mStatus = createAudioPatchStatus;
+                        }
                         command->mWaitStatus = false;
                         command->mCond.notify_one();
+                    } else if (command->mCommand == CREATE_AUDIO_PATCH &&
+                               command->mStatus == TIMED_OUT &&
+                               createAudioPatchStatus == NO_ERROR) {
+                        // Because of the special handling in insertCommand_l(), the
+                        // CREATE_AUDIO_PATCH command wait status can only be false here if a
+                        // timeout (see TIMED_OUT) happened.
+                        CreateAudioPatchData *createData =
+                                (CreateAudioPatchData *)command->mParam.get();
+                        ALOGW("AudioCommandThread() no caller awaiting for handle(%d) after \
+                                processing create audio patch, going to release it",
+                                createData->mHandle);
+                        sp<AudioCommand> releaseCommand = new AudioCommand();
+                        releaseCommand->mCommand = RELEASE_AUDIO_PATCH;
+                        ReleaseAudioPatchData *releaseData = new ReleaseAudioPatchData();
+                        releaseData->mHandle = createData->mHandle;
+                        releaseCommand->mParam = releaseData;
+                        insertCommand_l(releaseCommand, 0);
                     }
                 }
                 waitTime = -1;
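The block above compensates for a creator that gave up waiting: if a CREATE_AUDIO_PATCH command completes successfully after its caller timed out, nobody will ever release the patch, so the thread queues a RELEASE_AUDIO_PATCH for it. A standalone sketch of that pattern with std primitives (not the AudioCommandThread types; names are illustrative):

#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

// A worker finishes a request after its caller stopped waiting; since nobody will consume the
// result, the worker performs the compensating cleanup itself (here, "releasing" the handle).
int main() {
    std::mutex m;
    std::condition_variable cv;
    bool done = false;
    bool callerStillWaiting = true;

    std::thread worker([&] {
        std::this_thread::sleep_for(std::chrono::milliseconds(200));  // slow "create"
        std::lock_guard lock(m);
        done = true;
        if (callerStillWaiting) {
            cv.notify_one();  // normal path: the caller picks up the result
        } else {
            std::printf("releasing orphaned handle\n");  // timed-out path, like the block above
        }
    });

    {
        std::unique_lock lock(m);
        if (!cv.wait_for(lock, std::chrono::milliseconds(50), [&] { return done; })) {
            callerStillWaiting = false;  // give up, mirroring the TIMED_OUT status
        }
    }
    worker.join();
}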
@@ -2093,6 +2163,23 @@
     return sendCommand(command, delayMs);
 }
 
+status_t AudioPolicyService::AudioCommandThread::volumePortsCommand(
+        const std::vector<audio_port_handle_t> &ports, float volume, audio_io_handle_t output,
+        int delayMs)
+{
+    sp<AudioCommand> command = new AudioCommand();
+    command->mCommand = SET_PORTS_VOLUME;
+    sp<VolumePortsData> data = new VolumePortsData();
+    data->mPorts = ports;
+    data->mVolume = volume;
+    data->mIO = output;
+    command->mParam = data;
+    command->mWaitStatus = true;
+    ALOGV("AudioCommandThread() adding set volume ports %s, volume %f, output %d",
+            data->dumpPorts().c_str(), volume, output);
+    return sendCommand(command, delayMs);
+}
+
 status_t AudioPolicyService::AudioCommandThread::parametersCommand(audio_io_handle_t ioHandle,
                                                                    const char *keyValuePairs,
                                                                    int delayMs)
@@ -2423,6 +2510,31 @@
             delayMs = 1;
         } break;
 
+        case SET_PORTS_VOLUME: {
+            VolumePortsData *data = (VolumePortsData *)command->mParam.get();
+            VolumePortsData *data2 = (VolumePortsData *)command2->mParam.get();
+            if (data->mIO != data2->mIO) break;
+            // The command can be removed only if the port id lists are the same; otherwise,
+            // remove from command 2 all ports whose volume will be replaced by command 1.
+            std::vector<audio_port_handle_t> portsOnlyInCommand2{};
+            std::copy_if(data2->mPorts.begin(), data2->mPorts.end(),
+                    std::back_inserter(portsOnlyInCommand2), [&](const auto &portId) {
+                return std::find(data->mPorts.begin(), data->mPorts.end(), portId) ==
+                        data->mPorts.end();
+            });
+            if (!portsOnlyInCommand2.empty()) {
+                data2->mPorts = portsOnlyInCommand2;
+                break;
+            }
+            ALOGV("Filtering out volume command on output %d for ports %s",
+                    data->mIO, data->dumpPorts().c_str());
+            removedCommands.add(command2);
+            command->mTime = command2->mTime;
+            // force delayMs to a non-zero value so that the code below does not request to
+            // wait for the command status, as the command is now delayed
+            delayMs = 1;
+        } break;
+
         case SET_VOICE_VOLUME: {
             VoiceVolumeData *data = (VoiceVolumeData *)command->mParam.get();
             VoiceVolumeData *data2 = (VoiceVolumeData *)command2->mParam.get();
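The SET_PORTS_VOLUME coalescing above drops from the already-queued command every port that the newer command overrides, and removes the queued command entirely only when the port lists match. A standalone worked example of the same std::copy_if filter (sample port ids invented for illustration):

#include <algorithm>
#include <cstdio>
#include <iterator>
#include <vector>

int main() {
    std::vector<int> newerPorts{1, 2};     // ports of the command being inserted (command 1)
    std::vector<int> olderPorts{2, 3, 4};  // ports of the already-queued command (command 2)

    // Keep in the older command only the ports that the newer command does not override.
    std::vector<int> portsOnlyInOlder;
    std::copy_if(olderPorts.begin(), olderPorts.end(), std::back_inserter(portsOnlyInOlder),
                 [&](int portId) {
                     return std::find(newerPorts.begin(), newerPorts.end(), portId) ==
                             newerPorts.end();
                 });

    // Port 2 is dropped because its volume will be overridden; the older command survives with
    // ports {3, 4}. Had the lists been identical, the older command would be removed entirely.
    for (int port : portsOnlyInOlder) {
        std::printf("%d ", port);
    }
    std::printf("\n");
}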
@@ -2517,7 +2629,8 @@
 
     // Disable wait for status if delay is not 0.
     // Except for create audio patch command because the returned patch handle
-    // is needed by audio policy manager
+    // is needed by audio policy manager. An audio patch created after a timeout
+    // (see TIMED_OUT) will be released from threadLoop().
     if (delayMs != 0 && command->mCommand != CREATE_AUDIO_PATCH) {
         command->mWaitStatus = false;
     }
@@ -2569,6 +2682,12 @@
                                                    output, delayMs);
 }
 
+int AudioPolicyService::setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
+                                       audio_io_handle_t output, int delayMs)
+{
+    return (int)mAudioCommandThread->volumePortsCommand(ports, volume, output, delayMs);
+}
+
 int AudioPolicyService::setVoiceVolume(float volume, int delayMs)
 {
     return (int)mAudioCommandThread->voiceVolumeCommand(volume, delayMs);
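setPortsVolume() above is the ports-based counterpart of setStreamVolume(): one volume applied to a set of AudioTrack port ids on a given output. A self-contained sketch of the call shape with stand-in types (the real declaration lives in the client interface shown in the header below; everything here is illustrative only):

#include <cstdint>
#include <cstdio>
#include <vector>

// Stand-in typedefs so the sketch compiles on its own; the real types come from system/audio.h.
using audio_port_handle_t = int32_t;
using audio_io_handle_t = int32_t;

// Stand-in for the client interface method added in this change (illustrative only).
struct PolicyClientSketch {
    int setPortsVolume(const std::vector<audio_port_handle_t>& ports, float volume,
                       audio_io_handle_t output, int delayMs = 0) {
        std::printf("volume %.2f on %zu port(s) of output %d (delay %d ms)\n",
                    volume, ports.size(), output, delayMs);
        return 0;  // NO_ERROR
    }
};

int main() {
    PolicyClientSketch client;
    // All client tracks of one volume group routed to the same output receive the same volume.
    client.setPortsVolume({101, 102}, 0.5f, /*output=*/13);
}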
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 5297e47..c47b5e9 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -21,6 +21,7 @@
 #include <android/media/GetSpatializerResponse.h>
 #include <android-base/thread_annotations.h>
 #include <audio_utils/mutex.h>
+#include <com/android/media/permission/INativePermissionController.h>
 #include <cutils/misc.h>
 #include <cutils/config_utils.h>
 #include <cutils/compiler.h>
@@ -35,6 +36,8 @@
 #include <media/ToneGenerator.h>
 #include <media/AudioEffect.h>
 #include <media/AudioPolicy.h>
+#include <media/IAudioPolicyServiceLocal.h>
+#include <media/NativePermissionController.h>
 #include <media/UsecaseValidator.h>
 #include <mediautils/ServiceUtilities.h>
 #include "AudioPolicyEffects.h"
@@ -44,6 +47,7 @@
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/content/AttributionSourceState.h>
 
+#include <numeric>
 #include <unordered_map>
 
 namespace android {
@@ -68,12 +72,16 @@
 }
 
 using ::android::media::audiopolicy::AudioRecordClient;
+using ::com::android::media::permission::INativePermissionController;
+using ::com::android::media::permission::NativePermissionController;
+using ::com::android::media::permission::IPermissionProvider;
 
 class AudioPolicyService :
     public BinderService<AudioPolicyService>,
     public media::BnAudioPolicyService,
     public IBinder::DeathRecipient,
-    public SpatializerPolicyCallback
+    public SpatializerPolicyCallback,
+    public media::IAudioPolicyServiceLocal
 {
     friend class sp<AudioPolicyService>;
 
@@ -98,10 +106,10 @@
             const std::string& deviceName,
             const AudioFormatDescription& encodedFormat) override;
     binder::Status setPhoneState(AudioMode state, int32_t uid) override;
-    binder::Status setForceUse(media::AudioPolicyForceUse usage,
-                               media::AudioPolicyForcedConfig config) override;
-    binder::Status getForceUse(media::AudioPolicyForceUse usage,
-                               media::AudioPolicyForcedConfig* _aidl_return) override;
+    binder::Status setForceUse(android::media::audio::common::AudioPolicyForceUse usage,
+            android::media::audio::common::AudioPolicyForcedConfig config) override;
+    binder::Status getForceUse(android::media::audio::common::AudioPolicyForceUse usage,
+            android::media::audio::common::AudioPolicyForcedConfig* _aidl_return) override;
     binder::Status getOutput(AudioStreamType stream, int32_t* _aidl_return) override;
     binder::Status getOutputForAttr(const media::audio::common::AudioAttributes& attr,
                                     int32_t session,
@@ -121,6 +129,9 @@
     binder::Status startInput(int32_t portId) override;
     binder::Status stopInput(int32_t portId) override;
     binder::Status releaseInput(int32_t portId) override;
+    binder::Status setDeviceAbsoluteVolumeEnabled(const AudioDevice& device,
+                                                  bool enabled,
+                                                  AudioStreamType streamToDriveAbs) override;
     binder::Status initStreamVolume(AudioStreamType stream, int32_t indexMin,
                                     int32_t indexMax) override;
     binder::Status setStreamVolumeIndex(AudioStreamType stream,
@@ -314,8 +325,14 @@
     binder::Status getRegisteredPolicyMixes(
             std::vector <::android::media::AudioMix>* mixes) override;
 
+    // Should only be called by AudioService to push permission data down to audioserver
+    binder::Status getPermissionController(sp<INativePermissionController>* out) override;
+
     status_t onTransact(uint32_t code, const Parcel& data, Parcel* reply, uint32_t flags) override;
 
+    // -- IAudioPolicyServiceLocal methods
+    const IPermissionProvider& getPermissionProvider() const override;
+
     // IBinder::DeathRecipient
     virtual     void        binderDied(const wp<IBinder>& who);
 
@@ -338,6 +355,21 @@
                                      float volume,
                                      audio_io_handle_t output,
                                      int delayMs = 0);
+
+    /**
+     * Set a volume on AudioTrack port id(s) for a particular output.
+     * For the same user setting, a volume group (and hence the ports of the client tracks
+     * associated with it) can have different volumes for each output (destination device)
+     * it is attached to.
+     *
+     * @param ports to consider
+     * @param volume to set
+     * @param output to consider
+     * @param delayMs to use
+     * @return NO_ERROR if successful
+     */
+    virtual status_t setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
+            audio_io_handle_t output, int delayMs = 0);
     virtual status_t setVoiceVolume(float volume, int delayMs = 0);
 
     void doOnNewAudioModulesAvailable();
@@ -436,8 +468,8 @@
     app_state_t apmStatFromAmState(int amState);
 
     bool isSupportedSystemUsage(audio_usage_t usage);
-    status_t validateUsage(const audio_attributes_t& attr);
-    status_t validateUsage(const audio_attributes_t& attr,
+    binder::Status validateUsage(const audio_attributes_t& attr);
+    binder::Status validateUsage(const audio_attributes_t& attr,
                            const AttributionSourceState& attributionSource);
 
     void updateUidStates();
@@ -561,6 +593,7 @@
         // commands for tone AudioCommand
         enum {
             SET_VOLUME,
+            SET_PORTS_VOLUME,
             SET_PARAMETERS,
             SET_VOICE_VOLUME,
             STOP_OUTPUT,
@@ -594,6 +627,8 @@
                     void        exit();
                     status_t    volumeCommand(audio_stream_type_t stream, float volume,
                                             audio_io_handle_t output, int delayMs = 0);
+                    status_t    volumePortsCommand(const std::vector<audio_port_handle_t> &ports,
+                            float volume, audio_io_handle_t output, int delayMs = 0);
                     status_t    parametersCommand(audio_io_handle_t ioHandle,
                                             const char *keyValuePairs, int delayMs = 0);
                     status_t    voiceVolumeCommand(float volume, int delayMs = 0);
@@ -668,6 +703,20 @@
             audio_io_handle_t mIO;
         };
 
+        class VolumePortsData : public AudioCommandData {
+        public:
+            std::vector<audio_port_handle_t> mPorts;
+            float mVolume;
+            audio_io_handle_t mIO;
+            std::string dumpPorts() {
+                return std::string("volume ") + std::to_string(mVolume) + " on IO " +
+                        std::to_string(mIO) + " and ports " +
+                        std::accumulate(std::begin(mPorts), std::end(mPorts), std::string{},
+                                       [] (const std::string& ls, int rs) {
+                                return ls + std::to_string(rs) + " "; });
+            }
+        };
+
         class ParametersData : public AudioCommandData {
         public:
             audio_io_handle_t mIO;
@@ -774,7 +823,8 @@
                                     audio_config_base_t *mixerConfig,
                                     const sp<DeviceDescriptorBase>& device,
                                     uint32_t *latencyMs,
-                                    audio_output_flags_t flags);
+                                    audio_output_flags_t *flags,
+                                    audio_attributes_t attributes);
         // creates a special output that is duplicated to the two outputs passed as arguments. The duplication is performed by
         // a special mixer thread in the AudioFlinger.
         virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2);
@@ -807,6 +857,19 @@
         // set a stream volume for a particular output. For the same user setting, a given stream type can have different volumes
         // for each output (destination device) it is attached to.
         virtual status_t setStreamVolume(audio_stream_type_t stream, float volume, audio_io_handle_t output, int delayMs = 0);
+        /**
+         * Set a volume on port(s) for a particular output. For the same user setting, a volume
+         * group (and hence the ports of the client tracks associated with it) can have
+         * different volumes for each output (destination device) it is attached to.
+         *
+         * @param ports to consider
+         * @param volume to set
+         * @param output to consider
+         * @param delayMs to use
+         * @return NO_ERROR if successful
+         */
+        status_t setPortsVolume(const std::vector<audio_port_handle_t> &ports, float volume,
+                audio_io_handle_t output, int delayMs = 0) override;
 
         // function enabling to send proprietary informations directly from audio policy manager to audio hardware interface.
         virtual void setParameters(audio_io_handle_t ioHandle, const String8& keyValuePairs, int delayMs = 0);
@@ -872,6 +935,9 @@
         status_t getAudioMixPort(const struct audio_port_v7 *devicePort,
                                  struct audio_port_v7 *port) override;
 
+        status_t setTracksInternalMute(
+                const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override;
+
      private:
         AudioPolicyService *mAudioPolicyService;
     };
@@ -957,13 +1023,15 @@
                       const audio_io_handle_t io, AttributionSourceState attributionSource,
                             const audio_session_t session, audio_port_handle_t portId,
                             audio_port_handle_t deviceId, audio_stream_type_t stream,
-                            bool isSpatialized) :
+                            bool isSpatialized, audio_channel_mask_t channelMask) :
                     AudioClient(attributes, io, attributionSource, session, portId,
-                        deviceId), stream(stream), isSpatialized(isSpatialized)  {}
+                        deviceId), stream(stream), isSpatialized(isSpatialized),
+                        channelMask(channelMask) {}
                 ~AudioPlaybackClient() override = default;
 
         const audio_stream_type_t stream;
         const bool isSpatialized;
+        const audio_channel_mask_t channelMask;
     };
 
     void getPlaybackClientAndEffects(audio_port_handle_t portId,
@@ -994,14 +1062,14 @@
     void unloadAudioPolicyManager();
 
     /**
-     * Returns the number of active audio tracks on the specified output mixer.
+     * Returns the channel masks for active audio tracks on the specified output mixer.
      * The query can be specified to only include spatialized audio tracks or consider
      * all tracks.
      * @param output the I/O handle of the output mixer to consider
      * @param spatializedOnly true if only spatialized tracks should be considered
-     * @return the number of active tracks.
+     * @return a list of channel masks for all active tracks matching the condition.
      */
-    size_t countActiveClientsOnOutput_l(
+    std::vector<audio_channel_mask_t> getActiveTracksMasks_l(
             audio_io_handle_t output, bool spatializedOnly = true) REQUIRES(mMutex);
 
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAudioPolicyService_Mutex};
@@ -1045,6 +1113,7 @@
     CreateAudioPolicyManagerInstance mCreateAudioPolicyManager;
     DestroyAudioPolicyManagerInstance mDestroyAudioPolicyManager;
     std::unique_ptr<media::UsecaseValidator> mUsecaseValidator;
+    const sp<NativePermissionController> mPermissionController;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/service/AudioRecordClient.cpp b/services/audiopolicy/service/AudioRecordClient.cpp
index 6d8b3cf..733f0d6 100644
--- a/services/audiopolicy/service/AudioRecordClient.cpp
+++ b/services/audiopolicy/service/AudioRecordClient.cpp
@@ -18,15 +18,17 @@
 
 #include "AudioRecordClient.h"
 #include "AudioPolicyService.h"
+#include "binder/AppOpsManager.h"
 #include <android_media_audiopolicy.h>
 
+#include <algorithm>
+
 namespace android::media::audiopolicy {
 namespace audiopolicy_flags = android::media::audiopolicy;
 using android::AudioPolicyService;
 
 namespace {
-bool isAppOpSource(audio_source_t source)
-{
+bool isAppOpSource(audio_source_t source) {
     switch (source) {
         case AUDIO_SOURCE_FM_TUNER:
         case AUDIO_SOURCE_ECHO_REFERENCE:
@@ -55,7 +57,40 @@
 bool doesPackageTargetAtLeastU(std::string_view packageName) {
     return getTargetSdkForPackageName(packageName) >= __ANDROID_API_U__;
 }
-}
+
+class AttrSourceItr {
+  public:
+    using iterator_category = std::forward_iterator_tag;
+    using difference_type = std::ptrdiff_t;
+    using value_type = AttributionSourceState;
+    using pointer = const value_type*;
+    using reference = const value_type&;
+
+    AttrSourceItr() : mAttr(nullptr) {}
+
+    AttrSourceItr(const AttributionSourceState& attr) : mAttr(&attr) {}
+
+    reference operator*() const { return *mAttr; }
+    pointer operator->() const { return mAttr; }
+
+    AttrSourceItr& operator++() {
+        mAttr = !mAttr->next.empty() ? mAttr->next.data() : nullptr;
+        return *this;
+    }
+
+    AttrSourceItr operator++(int) {
+        AttrSourceItr tmp = *this;
+        ++(*this);
+        return tmp;
+    }
+
+    friend bool operator==(const AttrSourceItr& a, const AttrSourceItr& b) = default;
+
+    static AttrSourceItr end() { return AttrSourceItr{}; }
+private:
+    const AttributionSourceState * mAttr;
+};
+}  // anonymous namespace
 
 // static
 sp<OpRecordAudioMonitor>
@@ -110,15 +145,24 @@
     mOpCallback = new RecordAudioOpCallback(this);
     ALOGV("start watching op %d for %s", mAppOp, mAttributionSource.toString().c_str());
 
-    int flags = doesPackageTargetAtLeastU(
-            mAttributionSource.packageName.value_or("")) ?
-            AppOpsManager::WATCH_FOREGROUND_CHANGES : 0;
-    // TODO: We need to always watch AppOpsManager::OP_RECORD_AUDIO too
-    // since it controls the mic permission for legacy apps.
-    mAppOpsManager.startWatchingMode(mAppOp, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
-        mAttributionSource.packageName.value_or(""))),
-        flags,
-        mOpCallback);
+    int flags = doesPackageTargetAtLeastU(mAttributionSource.packageName.value_or(""))
+                        ? AppOpsManager::WATCH_FOREGROUND_CHANGES
+                        : 0;
+
+    const auto reg = [&](int32_t op) {
+        std::for_each(AttrSourceItr{mAttributionSource}, AttrSourceItr::end(),
+                      [&](const auto& attr) {
+                          mAppOpsManager.startWatchingMode(
+                                  op,
+                                  VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                                          attr.packageName.value_or(""))),
+                                  flags, mOpCallback);
+                      });
+    };
+    reg(mAppOp);
+    if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+        reg(AppOpsManager::OP_RECORD_AUDIO);
+    }
 }
 
 bool OpRecordAudioMonitor::hasOp() const {
@@ -131,14 +175,20 @@
 // due to the UID in createIfNeeded(). As a result for those record track, it's:
 // - not called from constructor,
 // - not called from RecordAudioOpCallback because the callback is not installed in this case
-void OpRecordAudioMonitor::checkOp(bool updateUidStates)
-{
-    // TODO: We need to always check AppOpsManager::OP_RECORD_AUDIO too
-    // since it controls the mic permission for legacy apps.
-    const int32_t mode = mAppOpsManager.checkOp(mAppOp,
-            mAttributionSource.uid, VALUE_OR_FATAL(aidl2legacy_string_view_String16(
-                mAttributionSource.packageName.value_or(""))));
-    bool hasIt = (mode == AppOpsManager::MODE_ALLOWED);
+void OpRecordAudioMonitor::checkOp(bool updateUidStates) {
+    const auto check = [&](int32_t op) -> bool {
+        return std::all_of(
+                AttrSourceItr{mAttributionSource}, AttrSourceItr::end(), [&](const auto& x) {
+                    return mAppOpsManager.checkOp(op, x.uid,
+                                                  VALUE_OR_FATAL(aidl2legacy_string_view_String16(
+                                                          x.packageName.value_or("")))) ==
+                           AppOpsManager::MODE_ALLOWED;
+                });
+    };
+    bool hasIt = check(mAppOp);
+    if (mAppOp != AppOpsManager::OP_RECORD_AUDIO) {
+        hasIt = hasIt && check(AppOpsManager::OP_RECORD_AUDIO);
+    }
 
     if (audiopolicy_flags::record_audio_device_aware_permission()) {
         const bool canRecord = recordingAllowed(mAttributionSource, mVirtualDeviceId, mAttr.source);
@@ -173,4 +223,4 @@
     }
 }
 
-} // android::media::audiopolicy::internal
+}  // namespace android::media::audiopolicy
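AttrSourceItr above exposes the AttributionSourceState::next chain as a forward range so that startWatchingMode() and checkOp() cover every link of an attribution chain, not just the head. A standalone sketch of the same traversal using a simplified stand-in struct (AttributionSourceState is AIDL-generated, so it is not reproduced here):

#include <cstdio>
#include <string>

// Simplified stand-in for the AIDL AttributionSourceState: the real type chains sources
// through a 'next' vector; here a plain pointer is enough to show the traversal.
struct AttrStateSketch {
    int uid;
    std::string packageName;
    const AttrStateSketch* next = nullptr;
};

// Require a predicate to hold for every link of the chain, mirroring the std::all_of over
// AttrSourceItr in checkOp() above.
template <typename Pred>
bool allOfChain(const AttrStateSketch& head, Pred pred) {
    for (const AttrStateSketch* cur = &head; cur != nullptr; cur = cur->next) {
        if (!pred(*cur)) return false;
    }
    return true;
}

int main() {
    AttrStateSketch client{10002, "com.example.client"};
    AttrStateSketch proxy{10001, "com.example.proxy", &client};
    bool allAllowed = allOfChain(proxy, [](const AttrStateSketch& attr) {
        std::printf("checking op for uid %d (%s)\n", attr.uid, attr.packageName.c_str());
        return true;  // stand-in for AppOpsManager::MODE_ALLOWED
    });
    std::printf("all allowed: %s\n", allAllowed ? "true" : "false");
}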
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index c98f8df..c7740ad 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -29,9 +29,7 @@
 #include <android/content/AttributionSourceState.h>
 #include <android/sysprop/BluetoothProperties.sysprop.h>
 #include <audio_utils/fixedfft.h>
-#include <com_android_media_audio.h>
 #include <cutils/bitops.h>
-#include <cutils/properties.h>
 #include <hardware/sensors.h>
 #include <media/stagefright/foundation/AHandler.h>
 #include <media/stagefright/foundation/AMessage.h>
@@ -43,6 +41,7 @@
 #include <utils/Thread.h>
 
 #include "Spatializer.h"
+#include "SpatializerHelper.h"
 
 namespace android {
 
@@ -398,12 +397,10 @@
         return status;
     }
     for (const auto channelMask : channelMasks) {
-        static const bool stereo_spatialization_enabled =
-                property_get_bool("ro.audio.stereo_spatialization_enabled", false);
         const bool channel_mask_spatialized =
-                (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
-                ? audio_channel_mask_contains_stereo(channelMask)
-                : audio_is_channel_mask_spatialized(channelMask);
+                SpatializerHelper::isStereoSpatializationFeatureEnabled()
+                        ? audio_channel_mask_contains_stereo(channelMask)
+                        : audio_is_channel_mask_spatialized(channelMask);
         if (!channel_mask_spatialized) {
             ALOGW("%s: ignoring channelMask:%#x", __func__, channelMask);
             continue;
@@ -936,7 +933,8 @@
             });
 }
 
-status_t Spatializer::attachOutput(audio_io_handle_t output, size_t numActiveTracks) {
+status_t Spatializer::attachOutput(audio_io_handle_t output,
+          const std::vector<audio_channel_mask_t>& activeTracksMasks) {
     bool outputChanged = false;
     sp<media::INativeSpatializerCallback> callback;
 
@@ -944,7 +942,7 @@
         audio_utils::lock_guard lock(mMutex);
         ALOGV("%s output %d mOutput %d", __func__, (int)output, (int)mOutput);
         mLocalLog.log("%s with output %d tracks %zu (mOutput %d)", __func__, (int)output,
-                      numActiveTracks, (int)mOutput);
+                      activeTracksMasks.size(), (int)mOutput);
         if (mOutput != AUDIO_IO_HANDLE_NONE) {
             LOG_ALWAYS_FATAL_IF(mEngine == nullptr, "%s output set without FX engine", __func__);
             // remove FX instance
@@ -969,7 +967,7 @@
 
         outputChanged = mOutput != output;
         mOutput = output;
-        mNumActiveTracks = numActiveTracks;
+        mActiveTracksMasks = activeTracksMasks;
         AudioSystem::addSupportedLatencyModesCallback(this);
 
         std::vector<audio_latency_mode_t> latencyModes;
@@ -1008,7 +1006,8 @@
 
     {
         audio_utils::lock_guard lock(mMutex);
-        mLocalLog.log("%s with output %d tracks %zu", __func__, (int)mOutput, mNumActiveTracks);
+        mLocalLog.log("%s with output %d num tracks %zu",
+            __func__, (int)mOutput, mActiveTracksMasks.size());
         ALOGV("%s mOutput %d", __func__, (int)mOutput);
         if (mOutput == AUDIO_IO_HANDLE_NONE) {
             return output;
@@ -1051,11 +1050,13 @@
     }
 }
 
-void Spatializer::updateActiveTracks(size_t numActiveTracks) {
+void Spatializer::updateActiveTracks(
+        const std::vector<audio_channel_mask_t>& activeTracksMasks) {
     audio_utils::lock_guard lock(mMutex);
-    if (mNumActiveTracks != numActiveTracks) {
-        mLocalLog.log("%s from %zu to %zu", __func__, mNumActiveTracks, numActiveTracks);
-        mNumActiveTracks = numActiveTracks;
+    if (mActiveTracksMasks != activeTracksMasks) {
+        mLocalLog.log("%s from %zu to %zu",
+                __func__, mActiveTracksMasks.size(), activeTracksMasks.size());
+        mActiveTracksMasks = activeTracksMasks;
         checkEngineState_l();
         checkSensorsState_l();
     }
@@ -1114,7 +1115,7 @@
         if (mPoseController != nullptr) {
             // TODO(b/253297301, b/255433067) reenable low latency condition check
             // for Head Tracking after Bluetooth HAL supports it correctly.
-            if (mNumActiveTracks > 0 && mLevel != Spatialization::Level::NONE
+            if (shouldUseHeadTracking_l() && mLevel != Spatialization::Level::NONE
                     && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
                     && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
                 if (supportsLowLatencyMode) {
@@ -1146,9 +1147,28 @@
     }
 }
 
+
+/* static */
+bool Spatializer::containsImmersiveChannelMask(
+        const std::vector<audio_channel_mask_t>& masks)
+{
+    for (auto mask : masks) {
+        if (audio_is_channel_mask_spatialized(mask)) {
+            return true;
+        }
+    }
+    // Only non-immersive channel masks, e.g. AUDIO_CHANNEL_OUT_STEREO, are present.
+    return false;
+}
+
+bool Spatializer::shouldUseHeadTracking_l() const {
+    // Head tracking is only available for immersive channel masks.
+    return containsImmersiveChannelMask(mActiveTracksMasks);
+}
+
 void Spatializer::checkEngineState_l() {
     if (mEngine != nullptr) {
-        if (mLevel != Spatialization::Level::NONE && mNumActiveTracks > 0) {
+        if (mLevel != Spatialization::Level::NONE && mActiveTracksMasks.size() > 0) {
             mEngine->setEnabled(true);
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
                     std::vector<Spatialization::Level>{mLevel});
@@ -1237,7 +1257,8 @@
     base::StringAppendF(&ss, "\n%smSupportsHeadTracking: %s\n", prefixSpace.c_str(),
                         mSupportsHeadTracking ? "true" : "false");
     // 2. Settings (Output, tracks)
-    base::StringAppendF(&ss, "%smNumActiveTracks: %zu\n", prefixSpace.c_str(), mNumActiveTracks);
+    base::StringAppendF(&ss, "%sNum Active Tracks: %zu\n",
+            prefixSpace.c_str(), mActiveTracksMasks.size());
     base::StringAppendF(&ss, "%sOutputStreamHandle: %d\n", prefixSpace.c_str(), (int)mOutput);
 
     // 3. Sensors, Effect information.
@@ -1248,8 +1269,9 @@
                         mDisplayOrientation);
 
     // 4. Show flag or property state.
-    base::StringAppendF(&ss, "%sStereo Spatialization: %s\n", prefixSpace.c_str(),
-            com_android_media_audio_stereo_spatialization() ? "true" : "false");
+    base::StringAppendF(
+            &ss, "%sStereo Spatialization: %s\n", prefixSpace.c_str(),
+            SpatializerHelper::isStereoSpatializationFeatureEnabled() ? "true" : "false");
 
     ss.append(prefixSpace + "CommandLog:\n");
     ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
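With mNumActiveTracks replaced by a vector of channel masks, head tracking is now engaged only when at least one active track carries an immersive mask. A standalone sketch of that gate with a stand-in predicate (the real code calls audio_is_channel_mask_spatialized() from system/audio.h; the mask values below are placeholders):

#include <cstdint>
#include <cstdio>
#include <vector>

using channel_mask_t = uint32_t;
constexpr channel_mask_t kStereoMask = 0x3;    // placeholder for AUDIO_CHANNEL_OUT_STEREO
constexpr channel_mask_t k5Point1Mask = 0x3f;  // placeholder for AUDIO_CHANNEL_OUT_5POINT1

// Stand-in for audio_is_channel_mask_spatialized(): treat anything beyond plain stereo as
// immersive for the purpose of this sketch.
bool isImmersive(channel_mask_t mask) { return mask != 0 && mask != kStereoMask; }

// Same shape as Spatializer::containsImmersiveChannelMask() above.
bool containsImmersiveChannelMask(const std::vector<channel_mask_t>& masks) {
    for (auto mask : masks) {
        if (isImmersive(mask)) return true;
    }
    return false;  // only non-immersive masks (e.g. stereo) are active: no head tracking
}

int main() {
    std::printf("stereo only  -> %d\n", containsImmersiveChannelMask({kStereoMask}));
    std::printf("stereo + 5.1 -> %d\n", containsImmersiveChannelMask({kStereoMask, k5Point1Mask}));
}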
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index c5f159c..5ea3258 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -185,7 +185,8 @@
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is opened and the spatializer engine must be created.
      */
-    status_t attachOutput(audio_io_handle_t output, size_t numActiveTracks);
+    status_t attachOutput(audio_io_handle_t output,
+                          const std::vector<audio_channel_mask_t>& activeTracksMasks);
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is closed and the spatializer engine must be release.
      */
@@ -199,7 +200,7 @@
         mOutput = output;
     }
 
-    void updateActiveTracks(size_t numActiveTracks);
+    void updateActiveTracks(const std::vector<audio_channel_mask_t>& activeTracksMasks);
 
     /** Gets the channel mask, sampling rate and format set for the spatializer input. */
     audio_config_base_t getAudioInConfig() const;
@@ -227,6 +228,16 @@
     void onSupportedLatencyModesChangedMsg(
             audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
 
+    // Made public for testing only
+    /**
+     * Returns true if the vector contains at least one immersive channel mask.
+     *
+     * For example, AUDIO_CHANNEL_OUT_STEREO is a non-immersive channel mask,
+     * whereas AUDIO_CHANNEL_OUT_5POINT1 is an immersive one.
+     */
+    static bool containsImmersiveChannelMask(
+            const std::vector<audio_channel_mask_t>& masks);
+
 private:
     Spatializer(effect_descriptor_t engineDescriptor,
                      SpatializerPolicyCallback *callback);
@@ -462,6 +473,11 @@
      */
     audio_latency_mode_t selectHeadtrackingConnectionMode_l() REQUIRES(mMutex);
 
+    /**
+     * Indicates if current conditions are compatible with head tracking.
+     */
+    bool shouldUseHeadTracking_l() const REQUIRES(mMutex);
+
     /** Effect engine descriptor */
     const effect_descriptor_t mEngineDescriptor;
     /** Callback interface to parent audio policy service */
@@ -539,7 +555,7 @@
     sp<ALooper> mLooper;
     sp<EngineCallbackHandler> mHandler;
 
-    size_t mNumActiveTracks GUARDED_BY(mMutex) = 0;
+    std::vector<audio_channel_mask_t> mActiveTracksMasks GUARDED_BY(mMutex);
     std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mMutex);
     /** preference order for low latency modes according to persist.bluetooth.hid.transport */
     std::vector<audio_latency_mode_t> mOrderedLowLatencyModes;
diff --git a/services/audiopolicy/service/SpatializerPoseController.cpp b/services/audiopolicy/service/SpatializerPoseController.cpp
index 874bde4..368dde0 100644
--- a/services/audiopolicy/service/SpatializerPoseController.cpp
+++ b/services/audiopolicy/service/SpatializerPoseController.cpp
@@ -22,6 +22,7 @@
 
 #define LOG_TAG "SpatializerPoseController"
 //#define LOG_NDEBUG 0
+#include <audio_utils/mutex.h>
 #include <cutils/properties.h>
 #include <sensor/Sensor.h>
 #include <media/MediaMetricsItem.h>
@@ -131,20 +132,22 @@
               Pose3f headToStage;
               std::optional<HeadTrackingMode> modeIfChanged;
               {
-                  std::unique_lock lock(mMutex);
-                  if (maxUpdatePeriod.has_value()) {
-                      mCondVar.wait_for(lock, maxUpdatePeriod.value(),
-                                        [this] { return mShouldExit || mShouldCalculate; });
-                  } else {
-                      mCondVar.wait(lock, [this] { return mShouldExit || mShouldCalculate; });
+                  audio_utils::unique_lock ul(mMutex);
+                  while (true) {
+                      if (mShouldExit) {
+                          ALOGV("Exiting thread");
+                          return;
+                      }
+                      if (mShouldCalculate) {
+                          std::tie(headToStage, modeIfChanged) = calculate_l();
+                          break;
+                      }
+                      if (maxUpdatePeriod.has_value()) {
+                          mCondVar.wait_for(ul, maxUpdatePeriod.value());
+                      } else {
+                          mCondVar.wait(ul);
+                      }
                   }
-                  if (mShouldExit) {
-                      ALOGV("Exiting thread");
-                      return;
-                  }
-
-                  // Calculate.
-                  std::tie(headToStage, modeIfChanged) = calculate_l();
               }
 
               // Invoke the callbacks outside the lock.
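The rewrite above replaces the predicate form of condition_variable::wait with an explicit loop, which keeps the exit and calculate checks visible and works with audio_utils::unique_lock and the thread-safety annotations. A minimal standalone version of the same loop shape using std primitives:

#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <optional>
#include <thread>

int main() {
    std::mutex m;
    std::condition_variable cv;
    bool shouldExit = false;
    bool shouldCalculate = false;
    std::optional<std::chrono::milliseconds> maxUpdatePeriod{std::chrono::milliseconds(20)};

    std::thread worker([&] {
        std::unique_lock ul(m);
        while (true) {
            if (shouldExit) {
                return;                      // exit checked first, as in the loop above
            }
            if (shouldCalculate) {
                shouldCalculate = false;
                std::printf("calculate\n");  // work done while holding the lock in this sketch
                continue;                    // re-evaluate the state flags
            }
            if (maxUpdatePeriod.has_value()) {
                cv.wait_for(ul, maxUpdatePeriod.value());  // periodic wake-up
            } else {
                cv.wait(ul);
            }
        }
    });

    { std::lock_guard lock(m); shouldCalculate = true; }
    cv.notify_all();
    std::this_thread::sleep_for(std::chrono::milliseconds(50));
    { std::lock_guard lock(m); shouldExit = true; }
    cv.notify_all();
    worker.join();
}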
@@ -173,7 +176,7 @@
 
 SpatializerPoseController::~SpatializerPoseController() {
     {
-        std::unique_lock lock(mMutex);
+        std::lock_guard lock(mMutex);
         mShouldExit = true;
         mCondVar.notify_all();
     }
@@ -278,8 +281,10 @@
 }
 
 void SpatializerPoseController::waitUntilCalculated() {
-    std::unique_lock lock(mMutex);
-    mCondVar.wait(lock, [this] { return mCalculated; });
+    audio_utils::unique_lock ul(mMutex);
+    while (!mCalculated) {
+        mCondVar.wait(ul);
+    }
 }
 
 std::tuple<media::Pose3f, std::optional<media::HeadTrackingMode>>
@@ -358,14 +363,15 @@
     }
 }
 
-std::string SpatializerPoseController::toString(unsigned level) const {
+std::string SpatializerPoseController::toString(unsigned level) const NO_THREAD_SAFETY_ANALYSIS {
     std::string prefixSpace(level, ' ');
     std::string ss = prefixSpace + "SpatializerPoseController:\n";
     bool needUnlock = false;
 
     prefixSpace += ' ';
     auto now = std::chrono::steady_clock::now();
-    if (!mMutex.try_lock_until(now + media::kSpatializerDumpSysTimeOutInSecond)) {
+    if (!audio_utils::std_mutex_timed_lock(mMutex, std::chrono::nanoseconds(
+            media::kSpatializerDumpSysTimeOutInSecond).count())) {
         ss.append(prefixSpace).append("try_lock failed, dumpsys maybe INACCURATE!\n");
     } else {
         needUnlock = true;
diff --git a/services/audiopolicy/service/SpatializerPoseController.h b/services/audiopolicy/service/SpatializerPoseController.h
index 7fa4f86..9955cd8 100644
--- a/services/audiopolicy/service/SpatializerPoseController.h
+++ b/services/audiopolicy/service/SpatializerPoseController.h
@@ -118,34 +118,34 @@
     std::string toString(unsigned level) const;
 
   private:
-    mutable std::timed_mutex mMutex;
+    mutable std::mutex mMutex;
     Listener* const mListener;
     const std::chrono::microseconds mSensorPeriod;
-    std::unique_ptr<media::HeadTrackingProcessor> mProcessor;
-    int32_t mHeadSensor = media::SensorPoseProvider::INVALID_HANDLE;
-    int32_t mScreenSensor = media::SensorPoseProvider::INVALID_HANDLE;
-    std::optional<media::HeadTrackingMode> mActualMode;
-    std::condition_variable_any mCondVar;
-    bool mShouldCalculate = true;
-    bool mShouldExit = false;
-    bool mCalculated = false;
+    std::unique_ptr<media::HeadTrackingProcessor> mProcessor GUARDED_BY(mMutex);
+    int32_t mHeadSensor GUARDED_BY(mMutex) = media::SensorPoseProvider::INVALID_HANDLE;
+    int32_t mScreenSensor GUARDED_BY(mMutex) = media::SensorPoseProvider::INVALID_HANDLE;
+    std::optional<media::HeadTrackingMode> mActualMode GUARDED_BY(mMutex);
+    std::condition_variable mCondVar GUARDED_BY(mMutex);
+    bool mShouldCalculate GUARDED_BY(mMutex) = true;
+    bool mShouldExit GUARDED_BY(mMutex) = false;
+    bool mCalculated GUARDED_BY(mMutex) = false;
 
-    media::VectorRecorder mHeadSensorRecorder{
+    media::VectorRecorder mHeadSensorRecorder GUARDED_BY(mMutex) {
         8 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
         { 3, 6, 7 } /* delimiterIdx */};
-    media::VectorRecorder mHeadSensorDurableRecorder{
+    media::VectorRecorder mHeadSensorDurableRecorder GUARDED_BY(mMutex) {
         8 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
         { 3, 6, 7 } /* delimiterIdx */};
 
-    media::VectorRecorder mScreenSensorRecorder{
+    media::VectorRecorder mScreenSensorRecorder GUARDED_BY(mMutex) {
         4 /* vectorSize */, std::chrono::seconds(1), 10 /* maxLogLine */,
         { 3 } /* delimiterIdx */};
-    media::VectorRecorder mScreenSensorDurableRecorder{
+    media::VectorRecorder mScreenSensorDurableRecorder GUARDED_BY(mMutex) {
         4 /* vectorSize */, std::chrono::minutes(1), 10 /* maxLogLine */,
         { 3 } /* delimiterIdx */};
 
     // Next to last variable as releasing this stops the callbacks
-    std::unique_ptr<media::SensorPoseProvider> mPoseProvider;
+    std::unique_ptr<media::SensorPoseProvider> mPoseProvider GUARDED_BY(mMutex);
 
     // It's important that mThread is the last variable in this class
     // since we start mThread in the initializer list
@@ -158,7 +158,8 @@
      * Calculates the new outputs and updates internal state. Must be called with the lock held.
      * Returns values that should be passed to the respective callbacks.
      */
-    std::tuple<media::Pose3f, std::optional<media::HeadTrackingMode>> calculate_l();
+    std::tuple<media::Pose3f, std::optional<media::HeadTrackingMode>> calculate_l()
+            REQUIRES(mMutex);
 };
 
 }  // namespace android
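The header now marks every mutex-protected member with GUARDED_BY and the lock-held helper with REQUIRES so that clang's -Wthread-safety can flag unguarded access. A minimal self-contained illustration of those annotations, assuming the android-base/thread_annotations.h macros used elsewhere in this tree (with an annotated libc++ std::mutex the warnings fire under -Wthread-safety):

#include <mutex>
#include <android-base/thread_annotations.h>

class PoseCounterSketch {
  public:
    void bump() {
        std::lock_guard _l(mMutex);
        bumpLocked();                       // ok: mMutex is held here
    }
    int value() const {
        std::lock_guard _l(mMutex);
        return mValue;                      // reads of GUARDED_BY members also require the lock
    }
  private:
    void bumpLocked() REQUIRES(mMutex) { ++mValue; }
    mutable std::mutex mMutex;
    int mValue GUARDED_BY(mMutex) = 0;      // flagged by -Wthread-safety if accessed unlocked
};

int main() {
    PoseCounterSketch counter;
    counter.bump();
    return counter.value() == 1 ? 0 : 1;
}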
diff --git a/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h b/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h
new file mode 100644
index 0000000..6776ff9
--- /dev/null
+++ b/services/audiopolicy/service/include/media/IAudioPolicyServiceLocal.h
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <media/IPermissionProvider.h>
+#include <utils/RefBase.h>
+
+namespace android::media {
+
+class IAudioPolicyServiceLocal : public virtual RefBase {
+  public:
+    virtual const ::com::android::media::permission::IPermissionProvider&
+    getPermissionProvider() const = 0;
+
+    virtual ~IAudioPolicyServiceLocal() = default;
+};
+
+}  // namespace android::media
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index fc349ee..154b063 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -12,6 +12,7 @@
     name: "audiopolicy_tests",
 
     defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
         "latest_android_media_audio_common_types_cpp_static",
     ],
 
@@ -40,6 +41,7 @@
     static_libs: [
         "android.media.audiopolicy-aconfig-cc",
         "audioclient-types-aidl-cpp",
+        "com.android.media.audioserver-aconfig-cc",
         "libaudiopolicycomponents",
         "libflagtest",
         "libgmock",
@@ -51,7 +53,10 @@
         "libaudiopolicymanager_interface_headers",
     ],
 
-    srcs: ["audiopolicymanager_tests.cpp"],
+    srcs: [
+        "audiopolicymanager_tests.cpp",
+        "test_execution_tracer.cpp",
+    ],
 
     data: [":audiopolicytest_configuration_files"],
 
@@ -105,7 +110,7 @@
         "-Werror",
     ],
 
-    test_suites: ["device-tests"],
+    test_suites: ["general-tests"],
 
 }
 
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index 7ef0266..79c25ab 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -17,6 +17,7 @@
 #include <map>
 #include <set>
 
+#include <media/TypeConverter.h>
 #include <system/audio.h>
 #include <utils/Log.h>
 #include <utils/String8.h>
@@ -37,17 +38,25 @@
 
     status_t openOutput(audio_module_handle_t module,
                         audio_io_handle_t *output,
-                        audio_config_t * /*halConfig*/,
-                        audio_config_base_t * /*mixerConfig*/,
+                        audio_config_t *halConfig,
+                        audio_config_base_t *mixerConfig,
                         const sp<DeviceDescriptorBase>& /*device*/,
                         uint32_t * /*latencyMs*/,
-                        audio_output_flags_t /*flags*/) override {
+                        audio_output_flags_t *flags,
+                        audio_attributes_t /*attributes*/) override {
         if (module >= mNextModuleHandle) {
             ALOGE("%s: Module handle %d has not been allocated yet (next is %d)",
                   __func__, module, mNextModuleHandle);
             return BAD_VALUE;
         }
         *output = mNextIoHandle++;
+        mOpenedOutputs[*output] = *flags;
+        ALOGD("%s: opened output %d: HAL(%s %s %d) Mixer(%s %s %d) %s", __func__, *output,
+              audio_channel_out_mask_to_string(halConfig->channel_mask),
+              audio_format_to_string(halConfig->format), halConfig->sample_rate,
+              audio_channel_out_mask_to_string(mixerConfig->channel_mask),
+              audio_format_to_string(mixerConfig->format), mixerConfig->sample_rate,
+              android::toString(*flags).c_str());
         return NO_ERROR;
     }
 
@@ -57,6 +66,16 @@
         return id;
     }
 
+    status_t closeOutput(audio_io_handle_t output) override {
+        if (auto iter = mOpenedOutputs.find(output); iter != mOpenedOutputs.end()) {
+            mOpenedOutputs.erase(iter);
+            return NO_ERROR;
+        } else {
+            ALOGE("%s: Unknown output %d", __func__, output);
+            return BAD_VALUE;
+        }
+    }
+
     status_t openInput(audio_module_handle_t module,
                        audio_io_handle_t *input,
                        audio_config_t * /*config*/,
@@ -70,6 +89,24 @@
             return BAD_VALUE;
         }
         *input = mNextIoHandle++;
+        mOpenedInputs.insert(*input);
+        ALOGD("%s: opened input %d", __func__, *input);
+        mOpenInputCallsCount++;
+        return NO_ERROR;
+    }
+
+    status_t closeInput(audio_io_handle_t input) override {
+        if (mOpenedInputs.erase(input) != 1) {
+            if (input >= mNextIoHandle) {
+                ALOGE("%s: I/O handle %d has not been allocated yet (next is %d)",
+                      __func__, input, mNextIoHandle);
+            } else {
+                ALOGE("%s: Attempt to close input %d twice", __func__, input);
+            }
+            return BAD_VALUE;
+        }
+        ALOGD("%s: closed input %d", __func__, input);
+        mCloseInputCallsCount++;
         return NO_ERROR;
     }
 
@@ -124,6 +161,8 @@
         return &it->second;
     };
 
+    size_t getOpenedInputsCount() const { return mOpenedInputs.size(); }
+
     audio_module_handle_t peekNextModuleHandle() const { return mNextModuleHandle; }
 
     void swapAllowedModuleNames(std::set<std::string>&& names = {}) {
@@ -221,6 +260,15 @@
         return NO_ERROR;
     }
 
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& tracksInternalMute) override {
+        for (const auto& trackInternalMute : tracksInternalMute) {
+            mTracksInternalMute[(audio_port_handle_t)trackInternalMute.portId] =
+                    trackInternalMute.muted;
+        }
+        return NO_ERROR;
+    }
+
     void addSupportedFormat(audio_format_t format) {
         mSupportedFormats.insert(format);
     }
@@ -229,6 +277,30 @@
         mSupportedChannelMasks.insert(channelMask);
     }
 
+    bool getTrackInternalMute(audio_port_handle_t portId) {
+        auto it = mTracksInternalMute.find(portId);
+        return it == mTracksInternalMute.end() ? false : it->second;
+    }
+    void resetInputApiCallsCounters() {
+        mOpenInputCallsCount = 0;
+        mCloseInputCallsCount = 0;
+    }
+
+    size_t getCloseInputCallsCount() const {
+        return mCloseInputCallsCount;
+    }
+
+    size_t getOpenInputCallsCount() const {
+        return mOpenInputCallsCount;
+    }
+
+    std::optional<audio_output_flags_t> getOpenOutputFlags(audio_io_handle_t output) const {
+        if (auto iter = mOpenedOutputs.find(output); iter != mOpenedOutputs.end()) {
+            return iter->second;
+        }
+        return std::nullopt;
+    }
+
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
     audio_io_handle_t mNextIoHandle = AUDIO_IO_HANDLE_NONE + 1;
@@ -241,6 +313,11 @@
     std::vector<struct audio_port_v7> mDisconnectedDevicePorts;
     std::set<audio_format_t> mSupportedFormats;
     std::set<audio_channel_mask_t> mSupportedChannelMasks;
+    std::map<audio_port_handle_t, bool> mTracksInternalMute;
+    std::set<audio_io_handle_t> mOpenedInputs;
+    size_t mOpenInputCallsCount = 0;
+    size_t mCloseInputCallsCount = 0;
+    std::map<audio_io_handle_t, audio_output_flags_t> mOpenedOutputs;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index e55e935..9ddfd6c 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -37,7 +37,8 @@
                         audio_config_base_t* /*mixerConfig*/,
                         const sp<DeviceDescriptorBase>& /*device*/,
                         uint32_t* /*latencyMs*/,
-                        audio_output_flags_t /*flags*/) override { return NO_INIT; }
+                        audio_output_flags_t* /*flags*/,
+                        audio_attributes_t /*attributes*/) override { return NO_INIT; }
     audio_io_handle_t openDuplicateOutput(audio_io_handle_t /*output1*/,
                                           audio_io_handle_t /*output2*/) override {
         return AUDIO_IO_HANDLE_NONE;
@@ -57,6 +58,10 @@
                              float /*volume*/,
                              audio_io_handle_t /*output*/,
                              int /*delayMs*/) override { return NO_INIT; }
+
+    status_t setPortsVolume(const std::vector<audio_port_handle_t>& /*ports*/, float /*volume*/,
+            audio_io_handle_t /*output*/, int /*delayMs*/) override { return NO_INIT; }
+
     void setParameters(audio_io_handle_t /*ioHandle*/,
                        const String8& /*keyValuePairs*/,
                        int /*delayMs*/) override { }
@@ -110,6 +115,11 @@
                              struct audio_port_v7 *mixPort __unused) override {
         return INVALID_OPERATION;
     }
+
+    status_t setTracksInternalMute(
+            const std::vector<media::TrackInternalMuteInfo>& /*tracksInternalMute*/) override {
+        return INVALID_OPERATION;
+    }
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestManager.h b/services/audiopolicy/tests/AudioPolicyTestManager.h
index aa7c9cd..e30882c 100644
--- a/services/audiopolicy/tests/AudioPolicyTestManager.h
+++ b/services/audiopolicy/tests/AudioPolicyTestManager.h
@@ -24,9 +24,11 @@
     explicit AudioPolicyTestManager(AudioPolicyClientInterface *clientInterface)
             : AudioPolicyTestManager(AudioPolicyConfig::createDefault(), clientInterface) {}
     AudioPolicyTestManager(const sp<const AudioPolicyConfig>& config,
-            AudioPolicyClientInterface *clientInterface)
+            AudioPolicyClientInterface *clientInterface,
+            std::string engineConfig = "")
             : AudioPolicyManager(config,
-                    loadApmEngineLibraryAndCreateEngine(config->getEngineLibraryNameSuffix()),
+                    loadApmEngineLibraryAndCreateEngine(config->getEngineLibraryNameSuffix(),
+                                                        engineConfig),
                     clientInterface) {}
     using AudioPolicyManager::getConfig;
     using AudioPolicyManager::initialize;
@@ -34,6 +36,7 @@
     using AudioPolicyManager::getInputs;
     using AudioPolicyManager::getAvailableOutputDevices;
     using AudioPolicyManager::getAvailableInputDevices;
+    using AudioPolicyManager::checkInputsForDevice;
     using AudioPolicyManager::setSurroundFormatEnabled;
     using AudioPolicyManager::releaseMsdOutputPatches;
     using AudioPolicyManager::setMsdOutputPatches;
@@ -43,7 +46,9 @@
     using AudioPolicyManager::setDeviceConnectionState;
     using AudioPolicyManager::deviceToAudioPort;
     using AudioPolicyManager::handleDeviceConfigChange;
+    using AudioPolicyManager::getInputProfile;
     uint32_t getAudioPortGeneration() const { return mAudioPortGeneration; }
+    HwModuleCollection getHwModules() const { return mHwModules; }
 };
 
 }  // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index f40a7d0..39f9b8a 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -29,11 +29,13 @@
 #include <android-base/properties.h>
 #include <android/content/AttributionSourceState.h>
 #include <android_media_audiopolicy.h>
+#include <com_android_media_audioserver.h>
 #include <flag_macros.h>
 #include <hardware/audio_effect.h>
 #include <media/AudioPolicy.h>
 #include <media/PatchBuilder.h>
 #include <media/RecordingActivityTracker.h>
+#include <media/TypeConverter.h>
 #include <utils/Log.h>
 #include <utils/Vector.h>
 #include <cutils/multiuser.h>
@@ -42,6 +44,7 @@
 #include "AudioPolicyManagerTestClient.h"
 #include "AudioPolicyTestClient.h"
 #include "AudioPolicyTestManager.h"
+#include "test_execution_tracer.h"
 
 using namespace android;
 using testing::UnorderedElementsAre;
@@ -174,6 +177,11 @@
 };
 
 class AudioPolicyManagerTest : public testing::Test {
+  public:
+    constexpr static uint32_t k384000SamplingRate = 384000;
+    constexpr static uint32_t k48000SamplingRate = 48000;
+    constexpr static uint32_t k96000SamplingRate = 96000;
+
   protected:
     void SetUp() override;
     void TearDown() override;
@@ -190,7 +198,7 @@
             audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_NONE,
             audio_io_handle_t *output = nullptr,
             audio_port_handle_t *portId = nullptr,
-            audio_attributes_t attr = {},
+            audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER,
             audio_session_t session = AUDIO_SESSION_NONE,
             int uid = 0,
             bool* isBitPerfect = nullptr);
@@ -221,13 +229,16 @@
     std::unique_ptr<AudioPolicyManagerTestClient> mClient;
     std::unique_ptr<AudioPolicyTestManager> mManager;
 
-    constexpr static const uint32_t k48000SamplingRate = 48000;
+    static const std::string sTestEngineConfig;
 };
 
+const std::string AudioPolicyManagerTest::sTestEngineConfig =
+        base::GetExecutableDirectory() + "/engine/test_audio_policy_engine_configuration.xml";
+
 void AudioPolicyManagerTest::SetUp() {
     mClient.reset(getClient());
     ASSERT_NO_FATAL_FAILURE(SetUpManagerConfig());  // Subclasses may want to customize the config.
-    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get()));
+    mManager.reset(new AudioPolicyTestManager(mConfig, mClient.get(), sTestEngineConfig));
     ASSERT_EQ(NO_ERROR, mManager->initialize());
     ASSERT_EQ(NO_ERROR, mManager->initCheck());
 }
@@ -298,11 +309,12 @@
     AudioPolicyInterface::output_type_t outputType;
     bool isSpatialized;
     bool isBitPerfectInternal;
+    float volume;
     AttributionSourceState attributionSource = createAttributionSourceState(uid);
     ASSERT_EQ(OK, mManager->getOutputForAttr(
                     &attr, output, session, &stream, attributionSource, &config, &flags,
                     selectedDeviceId, portId, {}, &outputType, &isSpatialized,
-                    isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect));
+                    isBitPerfect == nullptr ? &isBitPerfectInternal : isBitPerfect, &volume));
     ASSERT_NE(AUDIO_PORT_HANDLE_NONE, *portId);
     ASSERT_NE(AUDIO_IO_HANDLE_NONE, *output);
 }
@@ -478,8 +490,8 @@
         MsdAudioPatchCount,
         AudioPolicyManagerTestMsd,
         ::testing::Values(
-                MsdAudioPatchCountSpecification(1u, "single"),
-                MsdAudioPatchCountSpecification(2u, "dual")
+                MsdAudioPatchCountSpecification(2u, "single"),
+                MsdAudioPatchCountSpecification(3u, "dual")
         ),
         [](const ::testing::TestParamInfo<MsdAudioPatchCountSpecification> &info) {
                 return std::get<MSD_AUDIO_PATCH_COUNT_NAME_INDEX>(info.param); }
@@ -488,6 +500,9 @@
 void AudioPolicyManagerTestMsd::SetUpManagerConfig() {
     // TODO: Consider using Serializer to load part of the config from a string.
     ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTest::SetUpManagerConfig());
+    mConfig->getHwModules().getModuleFromName(
+            AUDIO_HARDWARE_MODULE_ID_PRIMARY)->setHalVersion(3, 0);
+
     mMsdOutputDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_BUS);
     sp<AudioProfile> pcmOutputProfile = new AudioProfile(
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
@@ -506,7 +521,7 @@
     mConfig->addDevice(mMsdOutputDevice);
     mConfig->addDevice(mMsdInputDevice);
 
-    if (mExpectedAudioPatchCount == 2) {
+    if (mExpectedAudioPatchCount == 3) {
         // Add SPDIF device with PCM output profile as a second device for dual MSD audio patching.
         mSpdifDevice = new DeviceDescriptor(AUDIO_DEVICE_OUT_SPDIF);
         mSpdifDevice->addAudioProfile(pcmOutputProfile);
@@ -519,7 +534,7 @@
                 addOutputProfile(spdifOutputProfile);
     }
 
-    sp<HwModule> msdModule = new HwModule(AUDIO_HARDWARE_MODULE_ID_MSD, 2 /*halVersionMajor*/);
+    sp<HwModule> msdModule = new HwModule(AUDIO_HARDWARE_MODULE_ID_MSD, 3 /*halVersionMajor*/);
     HwModuleCollection modules = mConfig->getHwModules();
     modules.add(msdModule);
     mConfig->setHwModules(modules);
@@ -559,7 +574,7 @@
             addOutputProfile(primaryEncodedOutputProfile);
 
     mDefaultOutputDevice = mConfig->getDefaultOutputDevice();
-    if (mExpectedAudioPatchCount == 2) {
+    if (mExpectedAudioPatchCount == 3) {
         mSpdifDevice->addAudioProfile(dtsOutputProfile);
         primaryEncodedOutputProfile->addSupportedDevice(mSpdifDevice);
     }
@@ -608,7 +623,7 @@
     const PatchCountCheck patchCount = snapshotPatchCount();
     mManager->setForceUse(AUDIO_POLICY_FORCE_FOR_ENCODED_SURROUND,
             AUDIO_POLICY_FORCE_ENCODED_SURROUND_ALWAYS);
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
 }
 
 TEST_P(AudioPolicyManagerTestMsd, PatchCreationSetReleaseMsdOutputPatches) {
@@ -616,15 +631,15 @@
     DeviceVector devices = mManager->getAvailableOutputDevices();
     // Remove MSD output device to avoid patching to itself
     devices.remove(mMsdOutputDevice);
-    ASSERT_EQ(mExpectedAudioPatchCount, devices.size());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, devices.size());
     mManager->setMsdOutputPatches(&devices);
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
     // Dual patch: exercise creating one new audio patch and reusing another existing audio patch.
     DeviceVector singleDevice(devices[0]);
     mManager->releaseMsdOutputPatches(singleDevice);
-    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 2, patchCount.deltaFromSnapshot());
     mManager->setMsdOutputPatches(&devices);
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
     mManager->releaseMsdOutputPatches(devices);
     ASSERT_EQ(0, patchCount.deltaFromSnapshot());
 }
@@ -644,7 +659,7 @@
     getOutputForAttr(&selectedDeviceId,
             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO, k48000SamplingRate);
     ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-    ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
 }
 
 TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrEncodedPlusPcmRoutesToMsd) {
@@ -667,7 +682,7 @@
     getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
             k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
     ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
-    ASSERT_EQ(0, patchCount.deltaFromSnapshot());
+    ASSERT_EQ(1, patchCount.deltaFromSnapshot());
 }
 
 TEST_P(AudioPolicyManagerTestMsd, GetOutputForAttrFormatSwitching) {
@@ -681,7 +696,7 @@
         ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
         ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
-        ASSERT_EQ(mExpectedAudioPatchCount, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(mExpectedAudioPatchCount - 1, patchCount.deltaFromSnapshot());
     }
     {
         const PatchCountCheck patchCount = snapshotPatchCount();
@@ -690,7 +705,7 @@
         getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_DTS, AUDIO_CHANNEL_OUT_5POINT1,
                 k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT, nullptr /*output*/, &portId);
         ASSERT_NE(selectedDeviceId, mMsdOutputDevice->getId());
-        ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount), patchCount.deltaFromSnapshot());
+        ASSERT_EQ(-static_cast<int>(mExpectedAudioPatchCount) + 2, patchCount.deltaFromSnapshot());
         mManager->releaseOutput(portId);
         ASSERT_EQ(0, patchCount.deltaFromSnapshot());
     }
@@ -700,7 +715,7 @@
         getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_AC3, AUDIO_CHANNEL_OUT_5POINT1,
                 k48000SamplingRate, AUDIO_OUTPUT_FLAG_DIRECT);
         ASSERT_EQ(selectedDeviceId, mDefaultOutputDevice->getId());
-        ASSERT_EQ(0, patchCount.deltaFromSnapshot());
+        ASSERT_EQ(1, patchCount.deltaFromSnapshot());
     }
 }
 
@@ -781,27 +796,27 @@
 
     audio_config_base_t directConfig = AUDIO_CONFIG_BASE_INITIALIZER;
     directConfig.format = AUDIO_FORMAT_DTS;
-    directConfig.sample_rate = 48000;
+    directConfig.sample_rate = k48000SamplingRate;
     directConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
 
     audio_config_base_t nonDirectConfig = AUDIO_CONFIG_BASE_INITIALIZER;
     nonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
-    nonDirectConfig.sample_rate = 48000;
+    nonDirectConfig.sample_rate = k48000SamplingRate;
     nonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
 
     audio_config_base_t nonExistentConfig = AUDIO_CONFIG_BASE_INITIALIZER;
     nonExistentConfig.format = AUDIO_FORMAT_E_AC3;
-    nonExistentConfig.sample_rate = 48000;
+    nonExistentConfig.sample_rate = k48000SamplingRate;
     nonExistentConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
 
     audio_config_base_t msdDirectConfig1 = AUDIO_CONFIG_BASE_INITIALIZER;
     msdDirectConfig1.format = AUDIO_FORMAT_AC3;
-    msdDirectConfig1.sample_rate = 48000;
+    msdDirectConfig1.sample_rate = k48000SamplingRate;
     msdDirectConfig1.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
 
     audio_config_base_t msdDirectConfig2 = AUDIO_CONFIG_BASE_INITIALIZER;
     msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
-    msdDirectConfig2.sample_rate = 48000;
+    msdDirectConfig2.sample_rate = k48000SamplingRate;
     msdDirectConfig2.channel_mask = AUDIO_CHANNEL_INDEX_MASK_24;
 
     audio_config_base_t msdNonDirectConfig = AUDIO_CONFIG_BASE_INITIALIZER;
@@ -848,27 +863,27 @@
 
     audio_config_t directConfig = AUDIO_CONFIG_INITIALIZER;
     directConfig.format = AUDIO_FORMAT_DTS;
-    directConfig.sample_rate = 48000;
+    directConfig.sample_rate = k48000SamplingRate;
     directConfig.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
 
     audio_config_t nonDirectConfig = AUDIO_CONFIG_INITIALIZER;
     nonDirectConfig.format = AUDIO_FORMAT_PCM_16_BIT;
-    nonDirectConfig.sample_rate = 48000;
+    nonDirectConfig.sample_rate = k48000SamplingRate;
     nonDirectConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
 
     audio_config_t nonExistentConfig = AUDIO_CONFIG_INITIALIZER;
     nonExistentConfig.format = AUDIO_FORMAT_E_AC3;
-    nonExistentConfig.sample_rate = 48000;
+    nonExistentConfig.sample_rate = k48000SamplingRate;
     nonExistentConfig.channel_mask = AUDIO_CHANNEL_OUT_STEREO;
 
     audio_config_t msdDirectConfig1 = AUDIO_CONFIG_INITIALIZER;
     msdDirectConfig1.format = AUDIO_FORMAT_AC3;
-    msdDirectConfig1.sample_rate = 48000;
+    msdDirectConfig1.sample_rate = k48000SamplingRate;
     msdDirectConfig1.channel_mask = AUDIO_CHANNEL_OUT_5POINT1;
 
     audio_config_t msdDirectConfig2 = AUDIO_CONFIG_INITIALIZER;
     msdDirectConfig2.format = AUDIO_FORMAT_IEC60958;
-    msdDirectConfig2.sample_rate = 48000;
+    msdDirectConfig2.sample_rate = k48000SamplingRate;
     msdDirectConfig2.channel_mask = AUDIO_CHANNEL_INDEX_MASK_24;
 
     audio_config_t msdNonDirectConfig = AUDIO_CONFIG_INITIALIZER;
@@ -1124,12 +1139,12 @@
     audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
     getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+            k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
             AUDIO_SESSION_NONE, uid);
     status_t status = mManager->startOutput(portId);
     if (status == DEAD_OBJECT) {
         getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-                48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
+                k48000SamplingRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
                 AUDIO_SESSION_NONE, uid);
         status = mManager->startOutput(portId);
     }
@@ -1155,129 +1170,6 @@
                                                            "", "", AUDIO_FORMAT_LDAC));
 }
 
-TEST_F(AudioPolicyManagerTestWithConfigurationFile, BitPerfectPlayback) {
-    const audio_format_t bitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
-    const audio_channel_mask_t bitPerfectChannelMask = AUDIO_CHANNEL_OUT_QUAD;
-    const uint32_t bitPerfectSampleRate = 48000;
-    mClient->addSupportedFormat(bitPerfectFormat);
-    mClient->addSupportedChannelMask(bitPerfectChannelMask);
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
-                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
-                                                           "", "", AUDIO_FORMAT_DEFAULT));
-    auto devices = mManager->getAvailableOutputDevices();
-    audio_port_handle_t usbPortId = AUDIO_PORT_HANDLE_NONE;
-    for (auto device : devices) {
-        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
-            usbPortId = device->getId();
-            break;
-        }
-    }
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, usbPortId);
-
-    const uid_t uid = 1234;
-    const uid_t anotherUid = 5678;
-    const audio_attributes_t mediaAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
-            .usage = AUDIO_USAGE_MEDIA,
-    };
-
-    std::vector<audio_mixer_attributes_t> mixerAttributes;
-    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(usbPortId, mixerAttributes));
-    EXPECT_GT(mixerAttributes.size(), 0);
-    size_t bitPerfectIndex = 0;
-    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
-        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
-            break;
-        }
-    }
-    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
-    EXPECT_EQ(bitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
-    EXPECT_EQ(bitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
-    EXPECT_EQ(bitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
-    EXPECT_EQ(NO_ERROR,
-              mManager->setPreferredMixerAttributes(
-                      &mediaAttr, usbPortId, uid, &mixerAttributes[bitPerfectIndex]));
-
-    audio_io_handle_t bitPerfectOutput = AUDIO_IO_HANDLE_NONE;
-    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
-    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t bitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
-    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    bool isBitPerfect;
-
-    // When there is no active bit-perfect playback, the output selection will follow default
-    // routing strategy.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
-    const auto outputDesc = mManager->getOutputs().valueFor(output);
-    EXPECT_NE(nullptr, outputDesc);
-    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // Start bit-perfect playback
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-            mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-    status_t status = mManager->startOutput(bitPerfectPortId);
-    if (status == DEAD_OBJECT) {
-        getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-                bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &bitPerfectOutput, &bitPerfectPortId,
-                mediaAttr, AUDIO_SESSION_NONE, uid, &isBitPerfect);
-        status = mManager->startOutput(bitPerfectPortId);
-    }
-    EXPECT_EQ(NO_ERROR, status);
-    EXPECT_TRUE(isBitPerfect);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, bitPerfectOutput);
-    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(bitPerfectOutput);
-    EXPECT_NE(nullptr, bitPerfectOutputDesc);
-    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
-              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
-
-    // If the playback is from preferred mixer attributes owner but the request doesn't match
-    // preferred mixer attributes, it will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            uid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr, AUDIO_SESSION_NONE,
-            anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    const audio_attributes_t dtmfAttr = {
-            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
-            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
-    };
-    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
-    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
-    portId = AUDIO_PORT_HANDLE_NONE;
-    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
-            48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, dtmfOutput);
-
-    // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
-    // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
-    getOutputForAttr(&selectedDeviceId, bitPerfectFormat, bitPerfectChannelMask,
-            bitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, mediaAttr,
-            AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
-    EXPECT_FALSE(isBitPerfect);
-    EXPECT_EQ(bitPerfectOutput, output);
-
-    EXPECT_EQ(NO_ERROR,
-              mManager->clearPreferredMixerAttributes(&mediaAttr, usbPortId, uid));
-    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
-                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
-                                                           "", "", AUDIO_FORMAT_LDAC));
-}
-
 TEST_F(AudioPolicyManagerTestWithConfigurationFile, PreferExactConfigForInput) {
     const audio_channel_mask_t deviceChannelMask = AUDIO_CHANNEL_IN_3POINT1;
     mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
@@ -1293,9 +1185,9 @@
     audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
     AttributionSourceState attributionSource = createAttributionSourceState(/*uid=*/ 0);
     audio_config_base_t requestedConfig = {
+            .sample_rate = k48000SamplingRate,
             .channel_mask = AUDIO_CHANNEL_IN_STEREO,
             .format = AUDIO_FORMAT_PCM_16_BIT,
-            .sample_rate = 48000
     };
     audio_config_base_t config = requestedConfig;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
@@ -1326,6 +1218,62 @@
                                                            "", "", AUDIO_FORMAT_DEFAULT));
 }
 
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, CheckInputsForDeviceClosesStreams) {
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_24_BIT_PACKED);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_MONO);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_STEREO);
+    // 'checkInputsForDevice' is normally called as part of 'setDeviceConnectionState'.
+    // Call it directly here to verify that it does not keep all intermediate streams open,
+    // which could cause the HAL to reject requests once its cap on open streams is reached.
+    const size_t streamCountBefore = mClient->getOpenedInputsCount();
+    sp<DeviceDescriptor> device = mManager->getHwModules().getDeviceDescriptor(
+            AUDIO_DEVICE_IN_USB_DEVICE, "", "", AUDIO_FORMAT_DEFAULT, true /*allowToCreate*/);
+    ASSERT_NE(nullptr, device.get());
+    EXPECT_EQ(NO_ERROR,
+            mManager->checkInputsForDevice(device, AUDIO_POLICY_DEVICE_STATE_AVAILABLE));
+    EXPECT_EQ(streamCountBefore, mClient->getOpenedInputsCount());
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, SetDeviceConnectionStateClosesStreams) {
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_16_BIT);
+    mClient->addSupportedFormat(AUDIO_FORMAT_PCM_24_BIT_PACKED);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_MONO);
+    mClient->addSupportedChannelMask(AUDIO_CHANNEL_IN_STEREO);
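+    // Connecting an input device must not leave any of the intermediate probing streams open.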
+    const size_t streamCountBefore = mClient->getOpenedInputsCount();
+    EXPECT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_IN_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    EXPECT_EQ(streamCountBefore, mClient->getOpenedInputsCount());
+}
+
+TEST_F(AudioPolicyManagerTestWithConfigurationFile, UpdateConfigFromInexactProfile) {
+    const audio_format_t expectedFormat = AUDIO_FORMAT_PCM_16_BIT;
+    const uint32_t expectedSampleRate = 48000;
+    const audio_channel_mask_t expectedChannelMask = AUDIO_CHANNEL_IN_STEREO;
+    const std::string expectedIOProfile = "primary input";
+
+    auto devices = mManager->getAvailableInputDevices();
+    sp<DeviceDescriptor> mic = nullptr;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_IN_BUILTIN_MIC) {
+            mic = device;
+            break;
+        }
+    }
+    EXPECT_NE(nullptr, mic);
+
+    audio_format_t requestedFormat = AUDIO_FORMAT_PCM_16_BIT;
+    uint32_t requestedSampleRate = 44100;
+    audio_channel_mask_t requestedChannelMask = AUDIO_CHANNEL_IN_STEREO;
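+    // getInputProfile() is expected to update the requested parameters in place to the closest
+    // configuration supported by the selected profile.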
+    auto profile = mManager->getInputProfile(
+            mic, requestedSampleRate, requestedFormat, requestedChannelMask, AUDIO_INPUT_FLAG_NONE);
+    EXPECT_EQ(expectedIOProfile, profile->getName());
+    EXPECT_EQ(expectedFormat, requestedFormat);
+    EXPECT_EQ(expectedSampleRate, requestedSampleRate);
+    EXPECT_EQ(expectedChannelMask, requestedChannelMask);
+}
+
 class AudioPolicyManagerTestDynamicPolicy : public AudioPolicyManagerTestWithConfigurationFile {
 protected:
     void TearDown() override;
@@ -2158,6 +2106,7 @@
     audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
     bool mIsSpatialized;
     bool mIsBitPerfect;
+    float mVolume;
 };
 
 TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting, MmapPlaybackStreamMatchingLoopbackDapMixFails) {
@@ -2176,7 +2125,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
 }
 
 TEST_P(AudioPolicyManagerTestMMapPlaybackRerouting,
@@ -2195,7 +2144,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
 }
 
 TEST_F(AudioPolicyManagerTestMMapPlaybackRerouting,
@@ -2226,7 +2175,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
     ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
     auto outputDesc = mManager->getOutputs().valueFor(mOutput);
     ASSERT_NE(nullptr, outputDesc);
@@ -2242,7 +2191,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
     ASSERT_EQ(usbDevicePort.id, mSelectedDeviceId);
     outputDesc = mManager->getOutputs().valueFor(mOutput);
     ASSERT_NE(nullptr, outputDesc);
@@ -2271,7 +2220,7 @@
               mManager->getOutputForAttr(&attr, &mOutput, AUDIO_SESSION_NONE, &mStream,
                                          createAttributionSourceState(testUid), &audioConfig,
                                          &outputFlags, &mSelectedDeviceId, &mPortId, {},
-                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect));
+                                         &mOutputType, &mIsSpatialized, &mIsBitPerfect, &mVolume));
 }
 
 INSTANTIATE_TEST_SUITE_P(
@@ -2601,10 +2550,12 @@
                         audio_config_base_t * mixerConfig,
                         const sp<DeviceDescriptorBase>& device,
                         uint32_t * latencyMs,
-                        audio_output_flags_t flags) override {
+                        audio_output_flags_t *flags,
+                        audio_attributes_t attributes) override {
         return mSimulateFailure ? BAD_VALUE :
                 AudioPolicyManagerTestClient::openOutput(
-                        module, output, halConfig, mixerConfig, device, latencyMs, flags);
+                        module, output, halConfig, mixerConfig, device, latencyMs, flags,
+                        attributes);
     }
 
     status_t openInput(audio_module_handle_t module,
@@ -2621,8 +2572,29 @@
 
     void setSimulateFailure(bool simulateFailure) { mSimulateFailure = simulateFailure; }
 
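+    // Forces setDeviceConnectedState() to return 'status' for the given device type;
+    // passing NO_ERROR removes the simulated status.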
+    void setSimulateBroadcastDeviceStatus(audio_devices_t device, status_t status) {
+        if (status != NO_ERROR) {
+            // Register a simulated connection status for this device type.
+            mSimulateBroadcastDeviceStatus[device] = status;
+        } else {
+            // Clear the simulated connection status for this device type.
+            mSimulateBroadcastDeviceStatus.erase(device);
+        }
+    }
+
+    status_t setDeviceConnectedState(const struct audio_port_v7* port,
+                                     media::DeviceConnectedState state) override {
+        if (mSimulateBroadcastDeviceStatus.find(port->ext.device.type) !=
+            mSimulateBroadcastDeviceStatus.end()) {
+            // If a simulated status exists for this device type, return it directly.
+            return mSimulateBroadcastDeviceStatus[port->ext.device.type];
+        }
+        return AudioPolicyManagerTestClient::setDeviceConnectedState(port, state);
+    }
+
   private:
     bool mSimulateFailure = false;
+    std::map<audio_devices_t, status_t> mSimulateBroadcastDeviceStatus;
 };
 
 }  // namespace
@@ -2643,6 +2615,9 @@
     void setSimulateOpenFailure(bool simulateFailure) {
         mFullClient->setSimulateFailure(simulateFailure); }
 
+    void setSimulateBroadcastDeviceStatus(audio_devices_t device, status_t status) {
+        mFullClient->setSimulateBroadcastDeviceStatus(device, status); }
+
     static const std::string sBluetoothConfig;
 
   private:
@@ -2686,6 +2661,30 @@
     }
 }
 
+TEST_P(AudioPolicyManagerTestDeviceConnectionFailed, BroadcastDeviceFailure) {
+    const audio_devices_t type = std::get<0>(GetParam());
+    const std::string name = std::get<1>(GetParam());
+    const std::string address = std::get<2>(GetParam());
+    const audio_format_t format = std::get<3>(GetParam());
+
+    // Simulate broadcastDeviceConnectionState() returning a failure.
+    setSimulateBroadcastDeviceStatus(type, INVALID_OPERATION);
+    ASSERT_EQ(INVALID_OPERATION, mManager->setDeviceConnectionState(
+            type, AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+            address.c_str(), name.c_str(), format));
+
+    // If broadcasting fails, the device must not be added to the available devices list.
+    if (audio_is_output_device(type)) {
+        auto availableDevices = mManager->getAvailableOutputDevices();
+        EXPECT_FALSE(availableDevices.containsDeviceWithType(type));
+    } else if (audio_is_input_device(type)) {
+        auto availableDevices = mManager->getAvailableInputDevices();
+        EXPECT_FALSE(availableDevices.containsDeviceWithType(type));
+    }
+
+    setSimulateBroadcastDeviceStatus(type, NO_ERROR);
+}
+
 INSTANTIATE_TEST_CASE_P(
         DeviceConnectionFailure,
         AudioPolicyManagerTestDeviceConnectionFailed,
@@ -3270,6 +3269,259 @@
             "low latency");
 }
 
+class AudioPolicyManagerPhoneTest : public AudioPolicyManagerTestWithConfigurationFile {
+protected:
+    std::string getConfigFile() override { return sPhoneConfig; }
+    void testOutputMixPortSelectionForAttr(audio_output_flags_t flags, audio_format_t format,
+            int samplingRate, bool isMusic, const char* expectedMixPortName);
+    void testOutputMixPortSelectionForStream(
+            audio_stream_type_t stream, const char* expectedMixPortName);
+    void verifyMixPortNameAndFlags(audio_io_handle_t output, const char* expectedMixPortName);
+
+    static const std::string sPhoneConfig;
+    static const std::map<std::string, audio_output_flags_t> sMixPortFlags;
+};
+
+const std::string AudioPolicyManagerPhoneTest::sPhoneConfig =
+        AudioPolicyManagerPhoneTest::sExecutableDir + "test_phone_apm_configuration.xml";
+
+// Must be in sync with the contents of the sPhoneConfig file.
+const std::map<std::string, audio_output_flags_t> AudioPolicyManagerPhoneTest::sMixPortFlags = {
+        {"primary output",
+         (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_PRIMARY | AUDIO_OUTPUT_FLAG_FAST)},
+        {"direct", AUDIO_OUTPUT_FLAG_DIRECT},
+        {"deep buffer", AUDIO_OUTPUT_FLAG_DEEP_BUFFER},
+        {"compressed_offload",
+         (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD |
+                                AUDIO_OUTPUT_FLAG_NON_BLOCKING |
+                                AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD)},
+        {"raw", (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_RAW | AUDIO_OUTPUT_FLAG_FAST)},
+        {"mmap_no_irq_out",
+         (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_DIRECT|AUDIO_OUTPUT_FLAG_MMAP_NOIRQ)},
+        {"voip_rx", AUDIO_OUTPUT_FLAG_VOIP_RX},
+};
+
+void AudioPolicyManagerPhoneTest::testOutputMixPortSelectionForAttr(
+        audio_output_flags_t flags, audio_format_t format, int samplingRate, bool isMusic,
+        const char* expectedMixPortName) {
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output;
+    audio_port_handle_t portId;
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    if (isMusic) {
+        attr.content_type = AUDIO_CONTENT_TYPE_MUSIC;
+        attr.usage = AUDIO_USAGE_MEDIA;
+    }
+    getOutputForAttr(&selectedDeviceId, format, AUDIO_CHANNEL_OUT_STEREO, samplingRate, flags,
+            &output, &portId, attr);
+    EXPECT_NO_FATAL_FAILURE(verifyMixPortNameAndFlags(output, expectedMixPortName));
+    mManager->releaseOutput(portId);
+}
+
+void AudioPolicyManagerPhoneTest::testOutputMixPortSelectionForStream(
+        audio_stream_type_t stream, const char* expectedMixPortName) {
+    audio_io_handle_t output = mManager->getOutput(stream);
+    EXPECT_NO_FATAL_FAILURE(verifyMixPortNameAndFlags(output, expectedMixPortName));
+}
+
+void AudioPolicyManagerPhoneTest::verifyMixPortNameAndFlags(audio_io_handle_t output,
+                                                            const char* expectedMixPortName) {
+    ALOGI("%s: checking output %d", __func__, output);
+    sp<SwAudioOutputDescriptor> outDesc = mManager->getOutputs().valueFor(output);
+    ASSERT_NE(nullptr, outDesc.get());
+    audio_port_v7 port = {};
+    outDesc->toAudioPort(&port);
+    EXPECT_EQ(AUDIO_PORT_TYPE_MIX, port.type);
+    EXPECT_EQ(AUDIO_PORT_ROLE_SOURCE, port.role);
+    ASSERT_STREQ(expectedMixPortName, port.name);
+
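+    // Cross-check the flags recorded by the test client at openOutput() time against the flags
+    // declared for this mix port in the configuration file.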
+    auto iter = sMixPortFlags.find(port.name);
+    ASSERT_NE(iter, sMixPortFlags.end()) << "\"" << port.name << "\" is not in sMixPortFlags";
+    auto actualFlags = mClient->getOpenOutputFlags(output);
+    ASSERT_TRUE(actualFlags.has_value()) << "\"" << port.name << "\" was not opened via client";
+    EXPECT_EQ(*actualFlags, iter->second);
+}
+
+TEST_F(AudioPolicyManagerPhoneTest, InitSuccess) {
+    // SetUp must finish with no assertions.
+}
+
+enum {
+    MIX_PORT_ATTR_EXPECTED_NAME_PARAMETER,
+    MIX_PORT_ATTR_EXPECTED_NAME_WITH_DBFM_PARAMETER,
+    MIX_PORT_ATTR_FLAGS_PARAMETER,
+    MIX_PORT_ATTR_FORMAT_PARAMETER,
+    MIX_PORT_ATTR_SAMPLING_RATE_PARAMETER,
+};
+using MixPortSelectionForAttr =
+        std::tuple<const char*, const char*, audio_output_flags_t, audio_format_t, int>;
+
+class AudioPolicyManagerOutputMixPortForAttrSelectionTest
+    : public AudioPolicyManagerPhoneTest,
+      public testing::WithParamInterface<MixPortSelectionForAttr> {
+};
+
+// There is no easy way to create a flat tuple from tuples via ::testing::Combine.
+// Instead, run the same selection twice while toggling the "deep buffer for media" setting.
+TEST_P(AudioPolicyManagerOutputMixPortForAttrSelectionTest, SelectPortByFlags) {
+    mConfig->setUseDeepBufferForMediaOverrideForTests(false);
+    ASSERT_NO_FATAL_FAILURE(testOutputMixPortSelectionForAttr(
+                    std::get<MIX_PORT_ATTR_FLAGS_PARAMETER>(GetParam()),
+                    std::get<MIX_PORT_ATTR_FORMAT_PARAMETER>(GetParam()),
+                    std::get<MIX_PORT_ATTR_SAMPLING_RATE_PARAMETER>(GetParam()),
+                    false /*isMusic*/,
+                    std::get<MIX_PORT_ATTR_EXPECTED_NAME_PARAMETER>(GetParam())));
+}
+TEST_P(AudioPolicyManagerOutputMixPortForAttrSelectionTest, SelectPortByFlags_Music) {
+    mConfig->setUseDeepBufferForMediaOverrideForTests(false);
+    ASSERT_NO_FATAL_FAILURE(testOutputMixPortSelectionForAttr(
+                    std::get<MIX_PORT_ATTR_FLAGS_PARAMETER>(GetParam()),
+                    std::get<MIX_PORT_ATTR_FORMAT_PARAMETER>(GetParam()),
+                    std::get<MIX_PORT_ATTR_SAMPLING_RATE_PARAMETER>(GetParam()),
+                    true /*isMusic*/,
+                    std::get<MIX_PORT_ATTR_EXPECTED_NAME_PARAMETER>(GetParam())));
+}
+TEST_P(AudioPolicyManagerOutputMixPortForAttrSelectionTest, SelectPortByFlags_DeepMedia) {
+    mConfig->setUseDeepBufferForMediaOverrideForTests(true);
+    const char* fallbackName = std::get<MIX_PORT_ATTR_EXPECTED_NAME_PARAMETER>(GetParam());
+    ASSERT_NO_FATAL_FAILURE(
+            testOutputMixPortSelectionForAttr(std::get<MIX_PORT_ATTR_FLAGS_PARAMETER>(GetParam()),
+                                       std::get<MIX_PORT_ATTR_FORMAT_PARAMETER>(GetParam()),
+                                       std::get<MIX_PORT_ATTR_SAMPLING_RATE_PARAMETER>(GetParam()),
+                                       false /*isMusic*/,
+                                       std::get<MIX_PORT_ATTR_EXPECTED_NAME_WITH_DBFM_PARAMETER>(
+                                               GetParam()) ?: fallbackName));
+}
+TEST_P(AudioPolicyManagerOutputMixPortForAttrSelectionTest, SelectPortByFlags_DeepMedia_Music) {
+    mConfig->setUseDeepBufferForMediaOverrideForTests(true);
+    const char* fallbackName = std::get<MIX_PORT_ATTR_EXPECTED_NAME_PARAMETER>(GetParam());
+    ASSERT_NO_FATAL_FAILURE(
+            testOutputMixPortSelectionForAttr(std::get<MIX_PORT_ATTR_FLAGS_PARAMETER>(GetParam()),
+                                       std::get<MIX_PORT_ATTR_FORMAT_PARAMETER>(GetParam()),
+                                       std::get<MIX_PORT_ATTR_SAMPLING_RATE_PARAMETER>(GetParam()),
+                                       true /*isMusic*/,
+                                       std::get<MIX_PORT_ATTR_EXPECTED_NAME_WITH_DBFM_PARAMETER>(
+                                               GetParam()) ?: fallbackName));
+}
+
+INSTANTIATE_TEST_CASE_P(AudioPolicyManagerOutputMixPortForAttrSelection,
+        AudioPolicyManagerOutputMixPortForAttrSelectionTest,
+        ::testing::Values(
+                std::make_tuple("primary output", "deep buffer", AUDIO_OUTPUT_FLAG_NONE,
+                        AUDIO_FORMAT_PCM_16_BIT, AudioPolicyManagerTest::k48000SamplingRate),
+                std::make_tuple("primary output", "deep buffer", AUDIO_OUTPUT_FLAG_NONE,
+                        AUDIO_FORMAT_PCM_FLOAT, AudioPolicyManagerTest::k48000SamplingRate),
+                // Note: this goes to "direct" because 384000 > SAMPLE_RATE_HZ_MAX (192000)
+                std::make_tuple("direct", "deep buffer", AUDIO_OUTPUT_FLAG_NONE,
+                        AUDIO_FORMAT_PCM_FLOAT, AudioPolicyManagerTest::k384000SamplingRate),
+                std::make_tuple("primary output", nullptr, AUDIO_OUTPUT_FLAG_FAST,
+                        AUDIO_FORMAT_PCM_16_BIT, AudioPolicyManagerTest::k48000SamplingRate),
+                std::make_tuple("direct", nullptr, AUDIO_OUTPUT_FLAG_DIRECT,
+                        AUDIO_FORMAT_PCM_FLOAT, AudioPolicyManagerTest::k96000SamplingRate),
+                std::make_tuple("direct", nullptr, AUDIO_OUTPUT_FLAG_DIRECT,
+                        AUDIO_FORMAT_PCM_FLOAT, AudioPolicyManagerTest::k384000SamplingRate),
+                std::make_tuple("deep buffer", nullptr, AUDIO_OUTPUT_FLAG_DEEP_BUFFER,
+                        AUDIO_FORMAT_PCM_16_BIT, AudioPolicyManagerTest::k48000SamplingRate),
+                std::make_tuple("deep buffer", nullptr, AUDIO_OUTPUT_FLAG_DEEP_BUFFER,
+                        AUDIO_FORMAT_PCM_FLOAT, AudioPolicyManagerTest::k384000SamplingRate),
+                std::make_tuple("compressed_offload", nullptr,
+                        (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD |
+                                AUDIO_OUTPUT_FLAG_NON_BLOCKING),
+                        AUDIO_FORMAT_MP3, AudioPolicyManagerTest::k48000SamplingRate),
+                std::make_tuple("raw", nullptr,
+                        AUDIO_OUTPUT_FLAG_RAW, AUDIO_FORMAT_PCM_32_BIT,
+                        AudioPolicyManagerTest::k48000SamplingRate),
+                std::make_tuple("mmap_no_irq_out", nullptr,
+                        (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_DIRECT |
+                                AUDIO_OUTPUT_FLAG_MMAP_NOIRQ),
+                        AUDIO_FORMAT_PCM_FLOAT, AudioPolicyManagerTest::k48000SamplingRate),
+                std::make_tuple("mmap_no_irq_out", nullptr,
+                        (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_DIRECT |
+                                AUDIO_OUTPUT_FLAG_MMAP_NOIRQ),
+                        AUDIO_FORMAT_PCM_FLOAT, AudioPolicyManagerTest::k384000SamplingRate),
+                std::make_tuple("voip_rx", nullptr, AUDIO_OUTPUT_FLAG_VOIP_RX,
+                        AUDIO_FORMAT_PCM_16_BIT, AudioPolicyManagerTest::k48000SamplingRate)),
+        [](const ::testing::TestParamInfo<MixPortSelectionForAttr>& info) {
+            static const std::string flagPrefix = "AUDIO_OUTPUT_FLAG_";
+            static const std::string formatPrefix = "AUDIO_FORMAT_";
+            std::string flags;
+            TypeConverter<OutputFlagTraits>::maskToString(
+                    std::get<MIX_PORT_ATTR_FLAGS_PARAMETER>(info.param), flags, "__");
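+            // Strip the repeated AUDIO_OUTPUT_FLAG_ prefix so the generated test names stay short.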
+            size_t index = 0;
+            while (true) {
+                index = flags.rfind(flagPrefix);
+                if (index == std::string::npos) break;
+                flags.erase(index, flagPrefix.length());
+            }
+            std::string format;
+            TypeConverter<FormatTraits>::toString(
+                    std::get<MIX_PORT_ATTR_FORMAT_PARAMETER>(info.param), format);
+            if (size_t index = format.find(formatPrefix); index != std::string::npos) {
+                format.erase(index, formatPrefix.length());
+            }
+            return flags + "__" + format + "__" +
+                    std::to_string(std::get<MIX_PORT_ATTR_SAMPLING_RATE_PARAMETER>(info.param));
+        }
+);
+
+
+enum {
+    MIX_PORT_STRM_EXPECTED_NAME_PARAMETER,
+    MIX_PORT_STRM_EXPECTED_NAME_WITH_DBFM_PARAMETER,
+    MIX_PORT_STRM_STREAM_PARAMETER,
+};
+using MixPortSelectionForStream =
+        std::tuple<const char*, const char*, audio_stream_type_t>;
+
+class AudioPolicyManagerOutputMixPortForStreamSelectionTest
+    : public AudioPolicyManagerPhoneTest,
+      public testing::WithParamInterface<MixPortSelectionForStream> {
+};
+
+// There is no easy way to create a flat tuple from tuples via ::testing::Combine.
+// Instead, run the same selection twice while toggling the "deep buffer for media" setting.
+TEST_P(AudioPolicyManagerOutputMixPortForStreamSelectionTest, SelectPort_NoDBFM) {
+    mConfig->setUseDeepBufferForMediaOverrideForTests(false);
+    ASSERT_NO_FATAL_FAILURE(testOutputMixPortSelectionForStream(
+                    std::get<MIX_PORT_STRM_STREAM_PARAMETER>(GetParam()),
+                    std::get<MIX_PORT_STRM_EXPECTED_NAME_PARAMETER>(GetParam())));
+}
+TEST_P(AudioPolicyManagerOutputMixPortForStreamSelectionTest, SelectPort_WithDBFM) {
+    mConfig->setUseDeepBufferForMediaOverrideForTests(true);
+    const char* fallbackName = std::get<MIX_PORT_STRM_EXPECTED_NAME_PARAMETER>(GetParam());
+    ASSERT_NO_FATAL_FAILURE(testOutputMixPortSelectionForStream(
+                    std::get<MIX_PORT_STRM_STREAM_PARAMETER>(GetParam()),
+                    std::get<MIX_PORT_STRM_EXPECTED_NAME_WITH_DBFM_PARAMETER>(
+                            GetParam()) ?: fallbackName));
+}
+
+INSTANTIATE_TEST_CASE_P(
+        AudioPolicyManagerOutputMixPortForStreamSelection,
+        AudioPolicyManagerOutputMixPortForStreamSelectionTest,
+        ::testing::Values(std::make_tuple("primary output", nullptr, AUDIO_STREAM_DEFAULT),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_SYSTEM),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_RING),
+                          std::make_tuple("primary output", "deep buffer", AUDIO_STREAM_MUSIC),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_ALARM),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_NOTIFICATION),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_BLUETOOTH_SCO),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_ENFORCED_AUDIBLE),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_DTMF),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_TTS),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_ACCESSIBILITY),
+                          std::make_tuple("primary output", nullptr, AUDIO_STREAM_ASSISTANT)),
+        [](const ::testing::TestParamInfo<MixPortSelectionForStream>& info) {
+            static const std::string streamPrefix = "AUDIO_STREAM_";
+            std::string stream;
+            TypeConverter<StreamTraits>::toString(
+                    std::get<MIX_PORT_STRM_STREAM_PARAMETER>(info.param), stream);
+            if (size_t index = stream.find(streamPrefix); index != std::string::npos) {
+                stream.erase(index, streamPrefix.length());
+            }
+            return stream;
+        }
+);
+
 class AudioPolicyManagerDynamicHwModulesTest : public AudioPolicyManagerTestWithConfigurationFile {
 protected:
     void SetUpManagerConfig() override;
@@ -3422,7 +3674,7 @@
     audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
     ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
-                                            48000));
+                                            k48000SamplingRate));
     auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
     ASSERT_NE(nullptr, selectedDevice);
 
@@ -3443,7 +3695,7 @@
     input = AUDIO_PORT_HANDLE_NONE;
     ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
-                                            48000));
+                                            k48000SamplingRate));
     ASSERT_EQ(preferredDevice, availableDevices.getDeviceFromId(selectedDeviceId));
 
     // After clearing preferred device for capture preset, the selected device for input should be
@@ -3454,7 +3706,7 @@
     input = AUDIO_PORT_HANDLE_NONE;
     ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
-                                            48000));
+                                            k48000SamplingRate));
     ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
 
     ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
@@ -3480,7 +3732,7 @@
     audio_io_handle_t input = AUDIO_PORT_HANDLE_NONE;
     ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
-                                            48000));
+                                            k48000SamplingRate));
     auto selectedDevice = availableDevices.getDeviceFromId(selectedDeviceId);
     ASSERT_NE(nullptr, selectedDevice);
 
@@ -3493,7 +3745,7 @@
     input = AUDIO_PORT_HANDLE_NONE;
     ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1,
                                             &selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT,
-                                            AUDIO_CHANNEL_IN_STEREO, 48000));
+                                            AUDIO_CHANNEL_IN_STEREO, k48000SamplingRate));
     ASSERT_NE(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
 
     // After clearing disabled device for capture preset, the selected device for input should be
@@ -3504,7 +3756,7 @@
     input = AUDIO_PORT_HANDLE_NONE;
     ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input, AUDIO_SESSION_NONE, 1, &selectedDeviceId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
-                                            48000));
+                                            k48000SamplingRate));
     ASSERT_EQ(selectedDevice, availableDevices.getDeviceFromId(selectedDeviceId));
 
     ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(
@@ -3607,8 +3859,415 @@
     // effect attached again
     ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &inputClientHandle, session, 1, &routedPortId,
                                             AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
-                                            48000));
+                                            k48000SamplingRate));
 
     // unregister effect should succeed since effect shall have been restore on the client session
     ASSERT_EQ(NO_ERROR, mManager->unregisterEffect(effectId));
 }
+
+class AudioPolicyManagerTestBitPerfectBase : public AudioPolicyManagerTestWithConfigurationFile {
+protected:
+    void SetUp() override;
+    void TearDown() override;
+
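+    // Helpers shared by the bit-perfect tests: open and start a bit-perfect output, clear the
+    // cached handles, and request a bit-perfect output expecting a specific status.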
+    void startBitPerfectOutput();
+    void reset();
+    void getBitPerfectOutput(status_t expected);
+
+    const audio_format_t mBitPerfectFormat = AUDIO_FORMAT_PCM_16_BIT;
+    const audio_channel_mask_t mBitPerfectChannelMask = AUDIO_CHANNEL_OUT_STEREO;
+    const uint32_t mBitPerfectSampleRate = k48000SamplingRate;
+    const uid_t mUid = 1234;
+    audio_port_handle_t mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+
+    audio_io_handle_t mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+
+    static constexpr audio_attributes_t sMediaAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_MUSIC,
+            .usage = AUDIO_USAGE_MEDIA,
+    };
+};
+
+void AudioPolicyManagerTestBitPerfectBase::SetUp() {
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::SetUp());
+
+    mClient->addSupportedFormat(mBitPerfectFormat);
+    mClient->addSupportedChannelMask(mBitPerfectChannelMask);
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_AVAILABLE,
+                                                           "", "", AUDIO_FORMAT_DEFAULT));
+    auto devices = mManager->getAvailableOutputDevices();
+    mUsbPortId = AUDIO_PORT_HANDLE_NONE;
+    for (auto device : devices) {
+        if (device->type() == AUDIO_DEVICE_OUT_USB_DEVICE) {
+            mUsbPortId = device->getId();
+            break;
+        }
+    }
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, mUsbPortId);
+
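+    // Find the bit-perfect mixer attributes among those supported by the USB device and register
+    // them as preferred for media playback from mUid.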
+    std::vector<audio_mixer_attributes_t> mixerAttributes;
+    EXPECT_EQ(NO_ERROR, mManager->getSupportedMixerAttributes(mUsbPortId, mixerAttributes));
+    EXPECT_GT(mixerAttributes.size(), 0);
+    size_t bitPerfectIndex = 0;
+    for (; bitPerfectIndex < mixerAttributes.size(); ++bitPerfectIndex) {
+        if (mixerAttributes[bitPerfectIndex].mixer_behavior == AUDIO_MIXER_BEHAVIOR_BIT_PERFECT) {
+            break;
+        }
+    }
+    EXPECT_LT(bitPerfectIndex, mixerAttributes.size());
+    EXPECT_EQ(mBitPerfectFormat, mixerAttributes[bitPerfectIndex].config.format);
+    EXPECT_EQ(mBitPerfectChannelMask, mixerAttributes[bitPerfectIndex].config.channel_mask);
+    EXPECT_EQ(mBitPerfectSampleRate, mixerAttributes[bitPerfectIndex].config.sample_rate);
+    EXPECT_EQ(NO_ERROR,
+              mManager->setPreferredMixerAttributes(
+                      &sMediaAttr, mUsbPortId, mUid, &mixerAttributes[bitPerfectIndex]));
+}
+
+void AudioPolicyManagerTestBitPerfectBase::TearDown() {
+    EXPECT_EQ(NO_ERROR,
+              mManager->clearPreferredMixerAttributes(&sMediaAttr, mUsbPortId, mUid));
+    ASSERT_EQ(NO_ERROR, mManager->setDeviceConnectionState(AUDIO_DEVICE_OUT_USB_DEVICE,
+                                                           AUDIO_POLICY_DEVICE_STATE_UNAVAILABLE,
+                                                           "", "", AUDIO_FORMAT_LDAC));
+
+    ASSERT_NO_FATAL_FAILURE(AudioPolicyManagerTestWithConfigurationFile::TearDown());
+}
+
+void AudioPolicyManagerTestBitPerfectBase::startBitPerfectOutput() {
+    reset();
+    bool isBitPerfect;
+
+    getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                     &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    status_t status = mManager->startOutput(mBitPerfectPortId);
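+    // If startOutput() invalidated the output (DEAD_OBJECT), request the output again and retry.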
+    if (status == DEAD_OBJECT) {
+        getOutputForAttr(&mSelectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                         mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &mBitPerfectOutput,
+                         &mBitPerfectPortId, sMediaAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+        status = mManager->startOutput(mBitPerfectPortId);
+    }
+    EXPECT_EQ(NO_ERROR, status);
+    EXPECT_TRUE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, mBitPerfectOutput);
+    const auto bitPerfectOutputDesc = mManager->getOutputs().valueFor(mBitPerfectOutput);
+    EXPECT_NE(nullptr, bitPerfectOutputDesc);
+    EXPECT_EQ(AUDIO_OUTPUT_FLAG_BIT_PERFECT,
+              bitPerfectOutputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+}
+
+void AudioPolicyManagerTestBitPerfectBase::reset() {
+    mBitPerfectOutput = AUDIO_IO_HANDLE_NONE;
+    mSelectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    mBitPerfectPortId = AUDIO_PORT_HANDLE_NONE;
+}
+
+void AudioPolicyManagerTestBitPerfectBase::getBitPerfectOutput(status_t expected) {
+    reset();
+    audio_stream_type_t stream = AUDIO_STREAM_DEFAULT;
+    AttributionSourceState attributionSource = createAttributionSourceState(mUid);
+    audio_config_t config = AUDIO_CONFIG_INITIALIZER;
+    config.sample_rate = mBitPerfectSampleRate;
+    config.channel_mask = mBitPerfectChannelMask;
+    config.format = mBitPerfectFormat;
+    audio_output_flags_t flags = AUDIO_OUTPUT_FLAG_BIT_PERFECT;
+    AudioPolicyInterface::output_type_t outputType;
+    bool isSpatialized;
+    bool isBitPerfect;
+    float volume;
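+    // Request an output with AUDIO_OUTPUT_FLAG_BIT_PERFECT and verify the returned status.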
+    EXPECT_EQ(expected,
+              mManager->getOutputForAttr(&sMediaAttr, &mBitPerfectOutput, AUDIO_SESSION_NONE,
+                                         &stream, attributionSource, &config, &flags,
+                                         &mSelectedDeviceId, &mBitPerfectPortId, {}, &outputType,
+                                         &isSpatialized, &isBitPerfect, &volume));
+}
+
+class AudioPolicyManagerTestBitPerfect : public AudioPolicyManagerTestBitPerfectBase {
+};
+
+TEST_F(AudioPolicyManagerTestBitPerfect, UseBitPerfectOutput) {
+    const uid_t anotherUid = 5678;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+
+    // When there is no active bit-perfect playback, output selection follows the default
+    // routing strategy.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, output);
+    const auto outputDesc = mManager->getOutputs().valueFor(output);
+    EXPECT_NE(nullptr, outputDesc);
+    EXPECT_NE(AUDIO_OUTPUT_FLAG_BIT_PERFECT, outputDesc->mFlags & AUDIO_OUTPUT_FLAG_BIT_PERFECT);
+
+    // Start bit-perfect playback
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // If the playback is from preferred mixer attributes owner but the request doesn't match
+    // preferred mixer attributes, it will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_QUAD,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, all other playback will be routed to bit-perfect output.
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+
+    // When bit-perfect playback is active, dtmf will also be routed to the bit-perfect output.
+    const audio_attributes_t dtmfAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING,
+    };
+    audio_io_handle_t dtmfOutput = AUDIO_IO_HANDLE_NONE;
+    selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    portId = AUDIO_PORT_HANDLE_NONE;
+    getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                     48000, AUDIO_OUTPUT_FLAG_NONE, &dtmfOutput, &portId, dtmfAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, dtmfOutput);
+
+    // When configuration matches preferred mixer attributes, which is bit-perfect, but the client
+    // is not the owner of preferred mixer attributes, the playback will not be bit-perfect.
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     mBitPerfectSampleRate, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, sMediaAttr,
+                     AUDIO_SESSION_NONE, anotherUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, output);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerTestBitPerfect,
+        InternalMuteWhenBitPerfectClientIsActive,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver,
+                             fix_concurrent_playback_behavior_with_bit_perfect_client))
+) {
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    // When bit-perfect playback is active, system sounds are routed to the bit-perfect output
+    // and muted internally, while the bit-perfect client keeps playing normally.
+    const uint32_t anotherSampleRate = 44100;
+    audio_port_handle_t systemSoundPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t systemSoundOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t systemSoundAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION,
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    bool isBitPerfect;
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &systemSoundOutput,
+                     &systemSoundPortId, systemSoundAttr, AUDIO_SESSION_NONE, mUid, &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, systemSoundOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(systemSoundPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(systemSoundPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    // When bit-perfect playback is active, notifications are also routed to the bit-perfect
+    // output. The notification sound plays normally while the bit-perfect client is muted
+    // internally.
+    audio_port_handle_t notificationPortId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t notificationOutput = AUDIO_IO_HANDLE_NONE;
+    const audio_attributes_t notificationAttr = {
+            .content_type = AUDIO_CONTENT_TYPE_SONIFICATION,
+            .usage = AUDIO_USAGE_NOTIFICATION,
+    };
+    getOutputForAttr(&selectedDeviceId, mBitPerfectFormat, mBitPerfectChannelMask,
+                     anotherSampleRate, AUDIO_OUTPUT_FLAG_NONE, &notificationOutput,
+                     &notificationPortId, notificationAttr, AUDIO_SESSION_NONE, mUid,
+                     &isBitPerfect);
+    EXPECT_FALSE(isBitPerfect);
+    EXPECT_EQ(mBitPerfectOutput, notificationOutput);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(notificationPortId));
+    EXPECT_TRUE(mClient->getTrackInternalMute(mBitPerfectPortId));
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(notificationPortId));
+    EXPECT_FALSE(mClient->getTrackInternalMute(mBitPerfectPortId));
+
+    EXPECT_EQ(NO_ERROR, mManager->stopOutput(mBitPerfectPortId));
+}
+
+class AudioPolicyManagerTestBitPerfectPhoneMode : public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_mode_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectPhoneMode, RejectBitPerfectWhenPhoneModeIsNotNormal) {
+    if (!com::android::media::audioserver::
+            fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_mode_t mode = GetParam();
+    mManager->setPhoneState(mode);
+    // When the phone mode is not normal, the bit-perfect output will be reopened,
+    // so the previous output handle is no longer valid.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // In this state, a new bit-perfect request will be rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+
+    mManager->setPhoneState(AUDIO_MODE_NORMAL);
+}
+
+INSTANTIATE_TEST_CASE_P(
+        PhoneMode,
+        AudioPolicyManagerTestBitPerfectPhoneMode,
+        testing::Values(AUDIO_MODE_IN_CALL,
+                        AUDIO_MODE_RINGTONE,
+                        AUDIO_MODE_IN_COMMUNICATION,
+                        AUDIO_MODE_CALL_SCREEN)
+);
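Note that INSTANTIATE_TEST_CASE_P is GoogleTest's older spelling; recent GoogleTest releases deprecate it in favor of INSTANTIATE_TEST_SUITE_P. As a point of reference, here is a minimal, self-contained sketch of the same value-parameterized pattern; the suite name and integer parameters are placeholders for illustration only and are not part of this change:

```cpp
#include <gtest/gtest.h>

// Illustrative value-parameterized suite; not part of the audio policy tests.
class PhoneModeLikeTest : public testing::TestWithParam<int> {};

TEST_P(PhoneModeLikeTest, ParamIsNonNegative) {
    EXPECT_GE(GetParam(), 0);  // The test body runs once per parameter value.
}

// Modern GoogleTest name for the instantiation macro.
INSTANTIATE_TEST_SUITE_P(ExampleValues, PhoneModeLikeTest, testing::Values(0, 1, 2));
```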
+
+class AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive :
+        public AudioPolicyManagerTestBitPerfectBase,
+        public testing::WithParamInterface<audio_usage_t> {
+};
+
+TEST_P(AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+       RejectBitPerfectWhenHigherPriorityUseCaseIsActive) {
+    if (!com::android::media::audioserver::
+                fix_concurrent_playback_behavior_with_bit_perfect_client()) {
+        GTEST_SKIP()
+                << "Flag fix_concurrent_playback_behavior_with_bit_perfect_client is not enabled";
+    }
+
+    ASSERT_NO_FATAL_FAILURE(startBitPerfectOutput());
+
+    audio_attributes_t attr = {
+            .content_type = AUDIO_CONTENT_TYPE_UNKNOWN,
+            .usage = GetParam(),
+    };
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(
+            getOutputForAttr(&selectedDeviceId, AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_OUT_STEREO,
+                   48000, AUDIO_OUTPUT_FLAG_NONE, &output, &portId, attr));
+    EXPECT_NE(mBitPerfectOutput, output);
+    EXPECT_EQ(NO_ERROR, mManager->startOutput(portId));
+    // When a high priority use case is active, the bit-perfect output will be closed.
+    EXPECT_EQ(nullptr, mManager->getOutputs().valueFor(mBitPerfectOutput));
+
+    // When any higher priority use case is active, the bit-perfect request will be rejected.
+    ASSERT_NO_FATAL_FAILURE(getBitPerfectOutput(INVALID_OPERATION));
+}
+
+INSTANTIATE_TEST_CASE_P(
+        HigherPriorityUseCases,
+        AudioPolicyManagerTestBitPerfectHigherPriorityUseCaseActive,
+        testing::Values(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
+                        AUDIO_USAGE_ALARM)
+);
+
+class AudioPolicyManagerInputPreemptionTest : public AudioPolicyManagerTestWithConfigurationFile {
+};
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        SameSessionReusesInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            k48000SamplingRate));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        k48000SamplingRate));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(0, mClient->getCloseInputCallsCount());
+    EXPECT_EQ(input1, input2);
+}
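The input-preemption tests assert on open/close counters exposed by the test client; the client's implementation is outside this hunk. As a rough illustration of the shape such a helper might take (this is an assumption, not the actual AudioPolicyManagerTestClient):

```cpp
#include <cstddef>

// Toy counting client: a hypothetical sketch of the counter bookkeeping the
// tests above rely on, not the real test client used by these tests.
class CountingClient {
  public:
    void onOpenInput() { ++mOpenInputCalls; }
    void onCloseInput() { ++mCloseInputCalls; }
    std::size_t getOpenInputCallsCount() const { return mOpenInputCalls; }
    std::size_t getCloseInputCallsCount() const { return mCloseInputCalls; }
    void resetInputApiCallsCounters() { mOpenInputCalls = mCloseInputCalls = 0; }

  private:
    std::size_t mOpenInputCalls = 0;
    std::size_t mCloseInputCalls = 0;
};
```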
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        LesserPriorityReusesInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            k48000SamplingRate));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_IO_HANDLE_NONE;
+    attr.source = AUDIO_SOURCE_VOICE_RECOGNITION;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, OTHER_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        k48000SamplingRate));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(0, mClient->getCloseInputCallsCount());
+    EXPECT_EQ(input1, input2);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        HigherPriorityPreemptsInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            k48000SamplingRate));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_IO_HANDLE_NONE;
+    attr.source = AUDIO_SOURCE_CAMCORDER;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, OTHER_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        k48000SamplingRate));
+
+    EXPECT_EQ(2, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(1, mClient->getCloseInputCallsCount());
+    EXPECT_NE(input1, input2);
+}
+
+int main(int argc, char** argv) {
+    ::testing::InitGoogleTest(&argc, argv);
+    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
+    return RUN_ALL_TESTS();
+}
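The main() above appends TestExecutionTracer as a GoogleTest event listener; its definition is outside this hunk. For context, a minimal listener of the same kind can be built on ::testing::EmptyTestEventListener; the class name and log format below are illustrative only, not the real tracer:

```cpp
#include <cstdio>
#include <gtest/gtest.h>

// Minimal sketch of a test-execution tracer; not the actual TestExecutionTracer.
class SketchTracer : public ::testing::EmptyTestEventListener {
    void OnTestStart(const ::testing::TestInfo& info) override {
        std::printf("[ TRACE ] starting %s.%s\n", info.test_suite_name(), info.name());
    }
    void OnTestEnd(const ::testing::TestInfo& info) override {
        std::printf("[ TRACE ] finished %s.%s\n", info.test_suite_name(), info.name());
    }
};

// Registered the same way as in main() above:
//     ::testing::UnitTest::GetInstance()->listeners().Append(new SketchTracer());
```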
diff --git a/services/audiopolicy/tests/resources/Android.bp b/services/audiopolicy/tests/resources/Android.bp
index 1c191f5..8e7a697 100644
--- a/services/audiopolicy/tests/resources/Android.bp
+++ b/services/audiopolicy/tests/resources/Android.bp
@@ -11,11 +11,16 @@
 filegroup {
     name: "audiopolicytest_configuration_files",
     srcs: [
+        "engine/test_audio_policy_engine_configuration.xml",
+        "engine/test_audio_policy_engine_default_stream_volumes.xml",
+        "engine/test_audio_policy_engine_product_strategies.xml",
+        "engine/test_audio_policy_engine_stream_volumes.xml",
         "test_audio_policy_configuration.xml",
         "test_audio_policy_configuration_bluetooth.xml",
         "test_audio_policy_primary_only_configuration.xml",
         "test_car_ap_atmos_offload_configuration.xml",
         "test_invalid_audio_policy_configuration.xml",
+        "test_phone_apm_configuration.xml",
         "test_settop_box_surround_configuration.xml",
         "test_tv_apm_configuration.xml",
     ],
diff --git a/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_configuration.xml b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_configuration.xml
new file mode 100644
index 0000000..dc2e7af
--- /dev/null
+++ b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_configuration.xml
@@ -0,0 +1,24 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+     -->
+
+<configuration version="1.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+
+    <xi:include href="test_audio_policy_engine_product_strategies.xml"/>
+    <xi:include href="test_audio_policy_engine_stream_volumes.xml"/>
+    <xi:include href="test_audio_policy_engine_default_stream_volumes.xml"/>
+
+</configuration>
+
diff --git a/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_default_stream_volumes.xml b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_default_stream_volumes.xml
new file mode 100644
index 0000000..d184cb5
--- /dev/null
+++ b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_default_stream_volumes.xml
@@ -0,0 +1,136 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- Default Volume Tables included by Audio Policy Configuration file -->
+<!-- Full Default Volume table for all device categories -->
+<volumes>
+    <reference name="FULL_SCALE_VOLUME_CURVE">
+    <!-- Full Scale reference Volume Curve -->
+        <point>0,0</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="SILENT_VOLUME_CURVE">
+        <point>0,-9600</point>
+        <point>100,-9600</point>
+    </reference>
+    <reference name="DEFAULT_SYSTEM_VOLUME_CURVE">
+    <!-- Default System reference Volume Curve -->
+        <point>1,-2400</point>
+        <point>33,-1800</point>
+        <point>66,-1200</point>
+        <point>100,-600</point>
+    </reference>
+    <reference name="DEFAULT_MEDIA_VOLUME_CURVE">
+    <!-- Default Media reference Volume Curve -->
+        <point>1,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE">
+    <!--Default Volume Curve -->
+        <point>1,-4950</point>
+        <point>33,-3350</point>
+        <point>66,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE">
+    <!-- Default is Speaker Media Volume Curve -->
+        <point>1,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_SPEAKER_SYSTEM_VOLUME_CURVE">
+    <!-- Default is Speaker System Volume Curve -->
+        <point>1,-4680</point>
+        <point>42,-2070</point>
+        <point>85,-540</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_EARPIECE_VOLUME_CURVE">
+    <!--Default Volume Curve -->
+        <point>1,-4950</point>
+        <point>33,-3350</point>
+        <point>66,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE">
+    <!-- Default is Ext Media System Volume Curve -->
+        <point>1,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-2100</point>
+        <point>100,-1000</point>
+    </reference>
+    <reference name="DEFAULT_HEARING_AID_VOLUME_CURVE">
+    <!-- Default Hearing Aid Volume Curve -->
+        <point>1,-12700</point>
+        <point>20,-8000</point>
+        <point>60,-4000</point>
+        <point>100,0</point>
+    </reference>
+    <!-- **************************************************************** -->
+    <!-- Non-mutable default volume curves:                               -->
+    <!--     * first point is always for index 0                          -->
+    <!--     * attenuation is small enough that stream can still be heard -->
+    <reference name="DEFAULT_NON_MUTABLE_VOLUME_CURVE">
+    <!-- Default non-mutable reference Volume Curve -->
+    <!--        based on DEFAULT_MEDIA_VOLUME_CURVE -->
+        <point>0,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_HEADSET_VOLUME_CURVE">
+    <!--Default non-mutable Volume Curve for headset -->
+    <!--    based on DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE -->
+        <point>0,-4950</point>
+        <point>33,-3350</point>
+        <point>66,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_SPEAKER_VOLUME_CURVE">
+    <!-- Default non-mutable Speaker Volume Curve -->
+    <!--    based on DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE -->
+        <point>0,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_EARPIECE_VOLUME_CURVE">
+    <!--Default non-mutable Volume Curve -->
+    <!--    based on DEFAULT_DEVICE_CATEGORY_EARPIECE_VOLUME_CURVE -->
+        <point>0,-4950</point>
+        <point>33,-3350</point>
+        <point>66,-1700</point>
+        <point>100,0</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_EXT_VOLUME_CURVE">
+    <!-- Default non-mutable Ext Media System Volume Curve -->
+    <!--     based on DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE -->
+        <point>0,-5800</point>
+        <point>20,-4000</point>
+        <point>60,-2100</point>
+        <point>100,-1000</point>
+    </reference>
+    <reference name="DEFAULT_NON_MUTABLE_HEARING_AID_VOLUME_CURVE">
+    <!-- Default non-mutable Hearing Aid Volume Curve -->
+    <!--     based on DEFAULT_HEARING_AID_VOLUME_CURVE -->
+        <point>0,-12700</point>
+        <point>20,-8000</point>
+        <point>60,-4000</point>
+        <point>100,0</point>
+    </reference>
+</volumes>
diff --git a/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_product_strategies.xml b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_product_strategies.xml
new file mode 100644
index 0000000..58e7152
--- /dev/null
+++ b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_product_strategies.xml
@@ -0,0 +1,98 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+     -->
+
+<ProductStrategies>
+
+    <!-- "hidden strategies" like TTS, enforced audible:
+            Shall we expose them here or keep it hard coded -->
+
+    <!-- Used to identify the volume of audio streams for enforced system sounds in certain
+         countries (e.g. camera in Japan)
+         This strategy will only have higher priority than phone if force for system is set to
+         enforced. -->
+
+    <ProductStrategy name="STRATEGY_PHONE">
+        <AttributesGroup streamType="AUDIO_STREAM_VOICE_CALL" volumeGroup="voice_call">
+            <Attributes> <Usage value="AUDIO_USAGE_VOICE_COMMUNICATION"/> </Attributes>
+        </AttributesGroup>
+        <AttributesGroup streamType="AUDIO_STREAM_BLUETOOTH_SCO" volumeGroup="bluetooth_sco">
+            <Attributes> <Flags value="AUDIO_FLAG_SCO"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_SONIFICATION">
+        <AttributesGroup streamType="AUDIO_STREAM_RING" volumeGroup="ring">
+            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE"/> </Attributes>
+        </AttributesGroup>
+        <AttributesGroup streamType="AUDIO_STREAM_ALARM" volumeGroup="alarm">
+            <Attributes> <Usage value="AUDIO_USAGE_ALARM"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_ENFORCED_AUDIBLE">
+        <AttributesGroup streamType="AUDIO_STREAM_ENFORCED_AUDIBLE" volumeGroup="enforced_audible">
+            <Attributes> <Flags value="AUDIO_FLAG_AUDIBILITY_ENFORCED"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_ACCESSIBILITY">
+        <AttributesGroup streamType="AUDIO_STREAM_ACCESSIBILITY" volumeGroup="accessibility">
+            <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_ACCESSIBILITY"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_SONIFICATION_RESPECTFUL">
+        <AttributesGroup streamType="AUDIO_STREAM_NOTIFICATION" volumeGroup="notification">
+            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION"/> </Attributes>
+            <Attributes> <Usage value="AUDIO_USAGE_NOTIFICATION_EVENT"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_MEDIA">
+        <AttributesGroup streamType="AUDIO_STREAM_ASSISTANT" volumeGroup="assistant">
+            <Attributes>
+                <ContentType value="AUDIO_CONTENT_TYPE_SPEECH"/>
+                <Usage value="AUDIO_USAGE_ASSISTANT"/>
+            </Attributes>
+        </AttributesGroup>
+        <AttributesGroup streamType="AUDIO_STREAM_MUSIC" volumeGroup="music">
+            <Attributes> <Usage value="AUDIO_USAGE_MEDIA"/> </Attributes>
+            <Attributes> <Usage value="AUDIO_USAGE_GAME"/> </Attributes>
+            <Attributes> <Usage value="AUDIO_USAGE_ASSISTANT"/> </Attributes>
+            <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_NAVIGATION_GUIDANCE"/> </Attributes>
+            <Attributes></Attributes>
+        </AttributesGroup>
+        <AttributesGroup streamType="AUDIO_STREAM_SYSTEM" volumeGroup="system">
+            <Attributes> <Usage value="AUDIO_USAGE_ASSISTANCE_SONIFICATION"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <ProductStrategy name="STRATEGY_DTMF">
+        <AttributesGroup streamType="AUDIO_STREAM_DTMF" volumeGroup="dtmf">
+            <Attributes> <Usage value="AUDIO_USAGE_VOICE_COMMUNICATION_SIGNALLING"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+    <!-- Used to identify the volume of audio streams exclusively transmitted through the speaker
+         (TTS) of the device -->
+    <ProductStrategy name="STRATEGY_TRANSMITTED_THROUGH_SPEAKER">
+        <AttributesGroup streamType="AUDIO_STREAM_TTS" volumeGroup="tts">
+            <Attributes> <Flags value="AUDIO_FLAG_BEACON"/> </Attributes>
+        </AttributesGroup>
+    </ProductStrategy>
+
+</ProductStrategies>
+
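Each ProductStrategy above groups audio attributes and binds them to a stream type and a volume group; conceptually, the engine resolves an attribute set (usage, content type, flags) to the first strategy whose attributes match, with STRATEGY_MEDIA's empty Attributes entry acting as the catch-all. A toy lookup in plain C++, where the enum and table values are illustrative only and greatly simplify the real engine (which also matches on content type and flags), might look like:

```cpp
#include <map>
#include <string>

// Illustrative usage -> strategy mapping, mirroring a few entries of the XML above.
enum class Usage { VoiceCommunication, Alarm, Media, Notification, Unknown };

std::string strategyForUsage(Usage usage) {
    static const std::map<Usage, std::string> kTable = {
            {Usage::VoiceCommunication, "STRATEGY_PHONE"},
            {Usage::Alarm, "STRATEGY_SONIFICATION"},
            {Usage::Media, "STRATEGY_MEDIA"},
            {Usage::Notification, "STRATEGY_SONIFICATION_RESPECTFUL"},
    };
    const auto it = kTable.find(usage);
    // Unmatched attributes fall into the media bucket, as in the config's empty
    // <Attributes></Attributes> entry.
    return it != kTable.end() ? it->second : "STRATEGY_MEDIA";
}
```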
diff --git a/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_stream_volumes.xml b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_stream_volumes.xml
new file mode 100644
index 0000000..af517cf
--- /dev/null
+++ b/services/audiopolicy/tests/resources/engine/test_audio_policy_engine_stream_volumes.xml
@@ -0,0 +1,221 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<!-- Volume section defines a volume curve for a given use case and device category.
+It contains a list of points of this curve expressing the attenuation in Millibels for a given
+volume index from 0 to 100.
+<volume deviceCategory="">
+<point>0,-9600</point>
+<point>100,0</point>
+</volume>
+-->
+
+<volumeGroups>
+    <volumeGroup>
+        <name>voice_call</name>
+        <indexMin>1</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2400</point>
+            <point>33,-1600</point>
+            <point>66,-800</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE">
+            <point>0,-2700</point>
+            <point>33,-1800</point>
+            <point>66,-900</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>system</name>
+        <indexMin>0</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>1,-3000</point>
+            <point>33,-2600</point>
+            <point>66,-2200</point>
+            <point>100,-1800</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>1,-5100</point>
+            <point>57,-2800</point>
+            <point>71,-2500</point>
+            <point>85,-2300</point>
+            <point>100,-2100</point>
+        </volume>
+        <!--volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/-->
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>ring</name>
+        <indexMin>0</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_DEVICE_CATEGORY_EARPIECE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>music</name>
+        <indexMin>0</indexMin>
+        <indexMax>25</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID"  ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>alarm</name>
+        <indexMin>1</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_NON_MUTABLE_HEADSET_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_NON_MUTABLE_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_NON_MUTABLE_EARPIECE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_NON_MUTABLE_EXT_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_NON_MUTABLE_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>notification</name>
+        <indexMin>0</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_SYSTEM_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_DEVICE_CATEGORY_EARPIECE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_DEVICE_CATEGORY_HEADSET_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>bluetooth_sco</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>0,-2400</point>
+            <point>33,-1600</point>
+            <point>66,-800</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE">
+            <point>0,-4200</point>
+            <point>33,-2800</point>
+            <point>66,-1400</point>
+            <point>100,0</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>enforced_audible</name>
+        <indexMin>0</indexMin>
+        <indexMax>7</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>1,-3000</point>
+            <point>33,-2600</point>
+            <point>66,-2200</point>
+            <point>100,-1800</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>1,-3400</point>
+            <point>71,-2400</point>
+            <point>100,-2000</point>
+        </volume>
+        <!--volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/-->
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>dtmf</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET">
+            <point>1,-3000</point>
+            <point>33,-2600</point>
+            <point>66,-2200</point>
+            <point>100,-1800</point>
+        </volume>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER">
+            <point>1,-4000</point>
+            <point>71,-2400</point>
+            <point>100,-1400</point>
+        </volume>
+        <!--volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/-->
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_SYSTEM_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_DEVICE_CATEGORY_EXT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>tts</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="SILENT_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="FULL_SCALE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="SILENT_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="SILENT_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="SILENT_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>accessibility</name>
+        <indexMin>1</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_NON_MUTABLE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_NON_MUTABLE_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_NON_MUTABLE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_NON_MUTABLE_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID" ref="DEFAULT_NON_MUTABLE_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+    <volumeGroup>
+        <name>assistant</name>
+        <indexMin>0</indexMin>
+        <indexMax>15</indexMax>
+        <volume deviceCategory="DEVICE_CATEGORY_HEADSET" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_SPEAKER" ref="DEFAULT_DEVICE_CATEGORY_SPEAKER_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EARPIECE" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_EXT_MEDIA" ref="DEFAULT_MEDIA_VOLUME_CURVE"/>
+        <volume deviceCategory="DEVICE_CATEGORY_HEARING_AID"  ref="DEFAULT_HEARING_AID_VOLUME_CURVE"/>
+    </volumeGroup>
+
+</volumeGroups>
+
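The curves above express attenuation in millibels (1/100 dB) at a handful of volume indices; values for intermediate indices are obtained by interpolating between the surrounding points. A small self-contained sketch of such an interpolation follows; the function name and point layout are assumptions for illustration, not the policy engine's actual code, and the real engine also rescales stream indices before evaluating the curve:

```cpp
#include <cstddef>
#include <utility>
#include <vector>

// Each point pairs a volume index in [0, 100] with an attenuation in millibels
// (negative values attenuate; 0 mB is full scale), as in the XML above.
using CurvePoint = std::pair<int, int>;

// Linearly interpolate the attenuation, in dB, for a given volume index.
float attenuationDbForIndex(const std::vector<CurvePoint>& curve, int index) {
    if (curve.empty()) return 0.0f;
    if (index <= curve.front().first) return curve.front().second / 100.0f;
    if (index >= curve.back().first) return curve.back().second / 100.0f;
    for (std::size_t i = 1; i < curve.size(); ++i) {
        const auto& [x0, y0] = curve[i - 1];
        const auto& [x1, y1] = curve[i];
        if (index <= x1) {
            const float t = static_cast<float>(index - x0) / (x1 - x0);
            return (y0 + t * (y1 - y0)) / 100.0f;
        }
    }
    return curve.back().second / 100.0f;
}

// Example: with DEFAULT_MEDIA_VOLUME_CURVE, index 10 lies between the
// (1, -5800) and (20, -4000) points and comes out to roughly -49.5 dB
// under this linear scheme.
```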
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index 1a299c6..3c64898 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -30,7 +30,7 @@
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                              samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                 </mixPort>
-                <mixPort name="primary input" role="sink">
+                <mixPort name="primary input" role="sink"  maxActiveCount="1" maxOpenCount="1">
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                              samplingRates="48000"
                              channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
@@ -66,6 +66,11 @@
                         channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
                 </mixPort>
                 <mixPort name="hifi_input" role="sink" />
+                <mixPort name="multiple_channels_input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000"
+                             channelMasks="AUDIO_CHANNEL_IN_5POINT1"/>
+                </mixPort>
             </mixPorts>
             <devicePorts>
                 <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
@@ -99,7 +104,7 @@
                 <route type="mix" sink="primary input"
                        sources="Built-In Mic,Hdmi-In Mic,USB Device In"/>
                 <route type="mix" sink="voip_tx"
-                       sources="Built-In Mic"/>
+                       sources="Built-In Mic,USB Device In"/>
                 <route type="mix" sink="Hdmi"
                        sources="primary output"/>
                 <route type="mix" sink="BT SCO"
@@ -114,6 +119,8 @@
                     sources="BUS Device In"/>
                 <route type="mix" sink="hifi_input"
                         sources="USB Device In" />
+                <route type="mix" sink="multiple_channels_input"
+                       sources="Built-In Mic" />
             </routes>
         </module>
 
diff --git a/services/audiopolicy/tests/resources/test_phone_apm_configuration.xml b/services/audiopolicy/tests/resources/test_phone_apm_configuration.xml
new file mode 100644
index 0000000..efe1400
--- /dev/null
+++ b/services/audiopolicy/tests/resources/test_phone_apm_configuration.xml
@@ -0,0 +1,279 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+          http://www.apache.org/licenses/LICENSE-2.0
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<audioPolicyConfiguration version="7.0" xmlns:xi="http://www.w3.org/2001/XInclude">
+    <globalConfiguration speaker_drc_enabled="false" call_screen_mode_supported="true" />
+    <modules>
+        <!-- Primary Audio HAL -->
+        <module name="primary" halVersion="2.0">
+            <attachedDevices>
+                <item>Speaker</item>
+                <item>Speaker Safe</item>
+                <item>Earpiece</item>
+                <item>Built-In Mic</item>
+                <item>Built-In Back Mic</item>
+                <item>Telephony Tx</item>
+                <item>Voice Call And Telephony Rx</item>
+                <item>Echo Ref In</item>
+            </attachedDevices>
+            <defaultOutputDevice>Speaker</defaultOutputDevice>
+            <mixPorts>
+                <mixPort name="primary output" role="source" flags="AUDIO_OUTPUT_FLAG_PRIMARY AUDIO_OUTPUT_FLAG_FAST"
+                         recommendedMuteDurationMs="40">
+                    <profile name="" format="AUDIO_FORMAT_PCM_FLOAT"
+                             samplingRates="48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="direct" role="source" flags="AUDIO_OUTPUT_FLAG_DIRECT"
+                         recommendedMuteDurationMs="40">
+                    <profile name="" format="AUDIO_FORMAT_PCM_FLOAT"
+                             samplingRates="48000 96000 384000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="deep buffer" role="source" flags="AUDIO_OUTPUT_FLAG_DEEP_BUFFER">
+                    <profile name="" format="AUDIO_FORMAT_PCM_FLOAT"
+                             samplingRates="48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="compressed_offload" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_DIRECT AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD AUDIO_OUTPUT_FLAG_NON_BLOCKING AUDIO_OUTPUT_FLAG_GAPLESS_OFFLOAD">
+                    <profile name="" format="AUDIO_FORMAT_MP3"
+                             samplingRates="8000 16000 24000 32000 44100 48000 96000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                    <profile name="" format="AUDIO_FORMAT_AAC_LC"
+                             samplingRates="8000 16000 24000 32000 44100 48000 96000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                    <profile name="" format="AUDIO_FORMAT_AAC_HE_V1"
+                             samplingRates="8000 16000 24000 32000 44100 48000 96000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                    <profile name="" format="AUDIO_FORMAT_AAC_HE_V2"
+                             samplingRates="8000 16000 24000 32000 44100 48000 96000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO AUDIO_CHANNEL_OUT_MONO"/>
+                    <profile name="" format="AUDIO_FORMAT_OPUS"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="haptic" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO_HAPTIC_AB" />
+                </mixPort>
+                <mixPort name="raw" role="source" flags="AUDIO_OUTPUT_FLAG_RAW AUDIO_OUTPUT_FLAG_FAST">
+                    <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                             samplingRates="48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="mmap_no_irq_out" role="source" flags="AUDIO_OUTPUT_FLAG_DIRECT AUDIO_OUTPUT_FLAG_MMAP_NOIRQ">
+                    <profile name="" format="AUDIO_FORMAT_PCM_FLOAT"
+                             samplingRates="48000 96000 384000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="immersive_out" role="source" flags="AUDIO_OUTPUT_FLAG_SPATIALIZER">
+                    <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                             samplingRates="48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="incall playback" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_INCALL_MUSIC">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO" />
+                </mixPort>
+                <mixPort name="voice call tx" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_OUT_STEREO" />
+                </mixPort>
+                <mixPort name="voip_rx" role="source"
+                         flags="AUDIO_OUTPUT_FLAG_VOIP_RX">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                           samplingRates="48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+                <mixPort name="primary input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO AUDIO_CHANNEL_INDEX_MASK_3"/>
+                </mixPort>
+                <mixPort name="hotword input" role="sink" flags="AUDIO_INPUT_FLAG_HW_HOTWORD" maxActiveCount="0" >
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="8000 11025 12000 16000 22050 24000 32000 44100 48000 96000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="incall capture" role="sink"  maxActiveCount="2" maxOpenCount="2">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+                <mixPort name="voice call rx" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+                <mixPort name="voip_tx" role="sink"
+                         flags="AUDIO_INPUT_FLAG_VOIP_TX">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000" channelMasks="AUDIO_CHANNEL_IN_MONO"/>
+                </mixPort>
+                <mixPort name="fast input" role="sink" flags="AUDIO_INPUT_FLAG_RAW AUDIO_INPUT_FLAG_FAST">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="mmap_no_irq_in" role="sink" flags="AUDIO_INPUT_FLAG_MMAP_NOIRQ">
+                    <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_IN_MONO AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+                <mixPort name="hifi_playback" role="source" />
+                <mixPort name="hifi_input" role="sink" />
+                <mixPort name="echo_ref_input" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_32_BIT"
+                             samplingRates="48000 96000"
+                             channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <!-- Output devices declaration, i.e. Sink DEVICE PORT -->
+                <devicePort tagName="Earpiece" type="AUDIO_DEVICE_OUT_EARPIECE" role="sink">
+                </devicePort>
+                <devicePort tagName="Speaker" type="AUDIO_DEVICE_OUT_SPEAKER" role="sink">
+                </devicePort>
+                <devicePort tagName="Speaker Safe" type="AUDIO_DEVICE_OUT_SPEAKER_SAFE" role="sink">
+                </devicePort>
+                <devicePort tagName="BT SCO" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO" role="sink">
+                </devicePort>
+                <devicePort tagName="BT SCO Headset" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET" role="sink">
+                </devicePort>
+                <devicePort tagName="BT SCO Car Kit" type="AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT" role="sink">
+                </devicePort>
+                <devicePort tagName="USB Device Out" type="AUDIO_DEVICE_OUT_USB_DEVICE" role="sink">
+                </devicePort>
+                <devicePort tagName="USB Headset Out" type="AUDIO_DEVICE_OUT_USB_HEADSET" role="sink">
+                </devicePort>
+                <devicePort tagName="HDMI Out" type="AUDIO_DEVICE_OUT_HDMI" role="sink">
+                </devicePort>
+                <devicePort tagName="Telephony Tx" type="AUDIO_DEVICE_OUT_TELEPHONY_TX" role="sink">
+                </devicePort>
+                <!-- Input devices declaration, i.e. Source DEVICE PORT -->
+                <devicePort tagName="Built-In Mic" type="AUDIO_DEVICE_IN_BUILTIN_MIC" role="source">
+                </devicePort>
+                <devicePort tagName="Built-In Back Mic" type="AUDIO_DEVICE_IN_BACK_MIC" role="source">
+                </devicePort>
+                <devicePort tagName="BT SCO Headset Mic" type="AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET" role="source">
+                </devicePort>
+                <devicePort tagName="BT A2DP Out" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP" role="sink"
+                            encodedFormats="AUDIO_FORMAT_OPUS AUDIO_FORMAT_AAC AUDIO_FORMAT_SBC">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="44100 48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="BT A2DP Headphones" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES" role="sink"
+                            encodedFormats="AUDIO_FORMAT_OPUS AUDIO_FORMAT_AAC AUDIO_FORMAT_SBC">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="44100 48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="BT A2DP Speaker" type="AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER" role="sink"
+                            encodedFormats="AUDIO_FORMAT_OPUS AUDIO_FORMAT_AAC AUDIO_FORMAT_SBC">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="44100 48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="BT BLE Headset" type="AUDIO_DEVICE_OUT_BLE_HEADSET" role="sink"
+                            encodedFormats="AUDIO_FORMAT_LC3">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="BT BLE Speaker" type="AUDIO_DEVICE_OUT_BLE_SPEAKER" role="sink"
+                            encodedFormats="AUDIO_FORMAT_LC3">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="BT BLE Broadcast" type="AUDIO_DEVICE_OUT_BLE_BROADCAST" role="sink"
+                            encodedFormats="AUDIO_FORMAT_LC3">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="48000 96000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+                <devicePort tagName="BLE Headset Mic" type="AUDIO_DEVICE_IN_BLE_HEADSET" role="source">
+                </devicePort>
+                <devicePort tagName="USB Device In" type="AUDIO_DEVICE_IN_USB_DEVICE" role="source">
+                </devicePort>
+                <devicePort tagName="USB Headset In" type="AUDIO_DEVICE_IN_USB_HEADSET" role="source">
+                </devicePort>
+                <!-- AUDIO_DEVICE_IN_VOICE_CALL and AUDIO_DEVICE_IN_TELEPHONY_RX have the same value -->
+                <devicePort tagName="Voice Call And Telephony Rx" type="AUDIO_DEVICE_IN_VOICE_CALL" role="source">
+                </devicePort>
+                <devicePort tagName="Echo Ref In" type="AUDIO_DEVICE_IN_ECHO_REFERENCE" role="source">
+                </devicePort>
+            </devicePorts>
+            <!-- route declaration, i.e. list all available sources for a given sink -->
+            <routes>
+                <route type="mix" sink="Speaker"
+                       sources="primary output,direct,deep buffer,haptic,raw,mmap_no_irq_out,voip_rx,compressed_offload"/>
+                <route type="mix" sink="Speaker Safe"
+                       sources="primary output,direct,deep buffer,haptic,raw,mmap_no_irq_out,voip_rx,compressed_offload"/>
+                <route type="mix" sink="Earpiece"
+                       sources="primary output,direct,deep buffer,haptic,raw,mmap_no_irq_out,voip_rx,compressed_offload"/>
+                <route type="mix" sink="BT A2DP Out"
+                       sources="primary output,direct,deep buffer,haptic,voip_rx,compressed_offload,raw,mmap_no_irq_out,immersive_out"/>
+                <route type="mix" sink="BT A2DP Headphones"
+                       sources="primary output,direct,deep buffer,haptic,voip_rx,compressed_offload,raw,mmap_no_irq_out,immersive_out"/>
+                <route type="mix" sink="BT A2DP Speaker"
+                       sources="primary output,direct,deep buffer,haptic,voip_rx,compressed_offload,raw,mmap_no_irq_out,immersive_out"/>
+                <route type="mix" sink="BT BLE Headset"
+                       sources="primary output,direct,deep buffer,haptic,voip_rx,compressed_offload,raw,mmap_no_irq_out,immersive_out"/>
+                <route type="mix" sink="BT BLE Speaker"
+                       sources="primary output,direct,deep buffer,haptic,voip_rx,compressed_offload,raw,mmap_no_irq_out,immersive_out"/>
+                <route type="mix" sink="BT BLE Broadcast"
+                       sources="primary output,direct,deep buffer,haptic,voip_rx,compressed_offload,raw,mmap_no_irq_out,immersive_out"/>
+                <route type="mix" sink="USB Device Out"
+                       sources="primary output,direct,deep buffer,haptic,raw,mmap_no_irq_out,voip_rx,hifi_playback,compressed_offload,immersive_out"/>
+                <route type="mix" sink="USB Headset Out"
+                       sources="primary output,direct,deep buffer,haptic,raw,mmap_no_irq_out,voip_rx,hifi_playback,compressed_offload,immersive_out"/>
+                <route type="mix" sink="HDMI Out"
+                       sources="primary output,direct,deep buffer,haptic,raw,mmap_no_irq_out,voip_rx,compressed_offload"/>
+                <route type="mix" sink="BT SCO"
+                       sources="primary output,direct,deep buffer,haptic,voip_rx,compressed_offload,raw,mmap_no_irq_out"/>
+                <route type="mix" sink="BT SCO Headset"
+                       sources="primary output,direct,deep buffer,haptic,voip_rx,compressed_offload,raw,mmap_no_irq_out"/>
+                <route type="mix" sink="BT SCO Car Kit"
+                       sources="primary output,direct,deep buffer,haptic,voip_rx,compressed_offload,raw,mmap_no_irq_out"/>
+                <route type="mix" sink="Telephony Tx" sources="incall playback,voice call tx" />
+                <route type="mix" sink="primary input"
+                       sources="Built-In Mic,Built-In Back Mic,USB Device In,USB Headset In,BT SCO Headset Mic,BLE Headset Mic"/>
+                <route type="mix" sink="hotword input"
+                       sources="Built-In Mic,Built-In Back Mic,USB Device In,USB Headset In,BT SCO Headset Mic,BLE Headset Mic"/>
+                <route type="mix" sink="incall capture" sources="Voice Call And Telephony Rx" />
+                <route type="mix" sink="voice call rx" sources="Voice Call And Telephony Rx" />
+                <route type="mix" sink="voip_tx"
+                       sources="Built-In Mic,Built-In Back Mic,USB Device In,USB Headset In,BT SCO Headset Mic,BLE Headset Mic"/>
+                <route type="mix" sink="fast input"
+                       sources="Built-In Mic,Built-In Back Mic,USB Device In,USB Headset In,BT SCO Headset Mic,BLE Headset Mic"/>
+                <route type="mix" sink="mmap_no_irq_in"
+                       sources="Built-In Mic,Built-In Back Mic,USB Device In,USB Headset In,BT SCO Headset Mic,BLE Headset Mic"/>
+                <route type="mix" sink="hifi_input" sources="USB Device In,USB Headset In" />
+                <route type="mix" sink="echo_ref_input" sources="Echo Ref In"/>
+            </routes>
+        </module>
+        <!-- USB Audio HAL -->
+        <module name="usbv2" halVersion="2.0">
+            <mixPorts>
+                <mixPort name="usb_accessory output" role="source">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="44100" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </mixPort>
+            </mixPorts>
+            <devicePorts>
+                <devicePort tagName="USB Host Out" type="AUDIO_DEVICE_OUT_USB_ACCESSORY" role="sink">
+                    <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
+                             samplingRates="44100" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
+                </devicePort>
+            </devicePorts>
+            <routes>
+                <route type="mix" sink="USB Host Out"
+                       sources="usb_accessory output"/>
+            </routes>
+        </module>
+    </modules>
+    <!-- End of Modules section -->
+</audioPolicyConfiguration>
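
The <routes> section above is the lookup the audio policy engine uses to answer "which mix or device ports may feed this sink": each <route> names one sink and a comma-separated sources list whose entries must match mixPort names or devicePort tagNames declared earlier in the same module. As a small, self-contained illustration (not part of the change; the file path is a placeholder and the real parser lives in the audio policy engine), the following C++ sketch walks that structure with tinyxml2 and prints each sink with its sources:

#include <tinyxml2.h>
#include <cstdio>

int main() {
    // Illustrative only: dump "sink <- sources" for every <route> in every <module>.
    tinyxml2::XMLDocument doc;
    if (doc.LoadFile("audio_policy_configuration.xml") != tinyxml2::XML_SUCCESS) {
        std::fprintf(stderr, "failed to load the configuration file\n");
        return 1;
    }
    auto* config = doc.FirstChildElement("audioPolicyConfiguration");
    if (config == nullptr) return 1;
    for (auto* modules = config->FirstChildElement("modules"); modules != nullptr;
            modules = modules->NextSiblingElement("modules")) {
        for (auto* module = modules->FirstChildElement("module"); module != nullptr;
                module = module->NextSiblingElement("module")) {
            auto* routes = module->FirstChildElement("routes");
            if (routes == nullptr) continue;
            for (auto* route = routes->FirstChildElement("route"); route != nullptr;
                    route = route->NextSiblingElement("route")) {
                const char* sink = route->Attribute("sink");
                const char* sources = route->Attribute("sources");
                std::printf("%s <- %s\n", sink ? sink : "?", sources ? sources : "?");
            }
        }
    }
    return 0;
}
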
diff --git a/services/audiopolicy/tests/spatializer_tests.cpp b/services/audiopolicy/tests/spatializer_tests.cpp
index 73bef43..0b40f32 100644
--- a/services/audiopolicy/tests/spatializer_tests.cpp
+++ b/services/audiopolicy/tests/spatializer_tests.cpp
@@ -33,6 +33,38 @@
 using media::audio::common::HeadTracking;
 using media::audio::common::Spatialization;
 
+// Test Spatializer Helper Methods
+
+TEST(Spatializer, containsImmersiveChannelMask) {
+    // Regardless of the implementation, we expect the following
+    // behavior.
+
+    // Pure non-immersive
+    EXPECT_FALSE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_STEREO }));
+    EXPECT_FALSE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO }));
+    EXPECT_FALSE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
+              AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_MONO }));
+
+    // Pure immersive
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_5POINT1 }));
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_7POINT1 }));
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_5POINT1, AUDIO_CHANNEL_OUT_7POINT1,
+              AUDIO_CHANNEL_OUT_22POINT2 }));
+
+    // Mixed immersive/non-immersive
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_7POINT1POINT4 }));
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
+              AUDIO_CHANNEL_OUT_7POINT1 }));
+}
+
 class TestSpatializerPolicyCallback :
         public SpatializerPolicyCallback {
 public:
@@ -68,7 +100,7 @@
         mSpatializer->setOutput(AUDIO_IO_HANDLE_NONE);
         mSpatializer->setDesiredHeadTrackingMode(HeadTracking::Mode::DISABLED);
         mSpatializer->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
-        mSpatializer->updateActiveTracks(0);
+        mSpatializer->updateActiveTracks({});
     }
 
     static constexpr audio_io_handle_t sTestOutput = 1977;
@@ -174,12 +206,12 @@
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
 
     // requested latency mode must be low if at least one spatialized track is active
-    mSpatializer->updateActiveTracks(1);
+    mSpatializer->updateActiveTracks({AUDIO_CHANNEL_OUT_5POINT1});
     requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_LOW);
 
     // requested latency mode must be free after stopping the last spatialized track
-    mSpatializer->updateActiveTracks(0);
+    mSpatializer->updateActiveTracks({});
     requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
 }
@@ -202,7 +234,7 @@
 
     // requested latency mode must be low software if at least one spatialized track is active
     // and the only supported low latency mode is low software
-    mSpatializer->updateActiveTracks(1);
+    mSpatializer->updateActiveTracks({AUDIO_CHANNEL_OUT_5POINT1});
     requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
 
@@ -225,7 +257,7 @@
     }
 
     // requested latency mode must be free after stopping the last spatialized track
-    mSpatializer->updateActiveTracks(0);
+    mSpatializer->updateActiveTracks({});
     requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
 }
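
The new containsImmersiveChannelMask test above pins down the expected behavior without showing the helper itself (it lives in Spatializer.cpp, outside this hunk). A minimal sketch that satisfies exactly those expectations, assuming the channel-count helper from system/audio.h, would be:

#include <system/audio.h>
#include <vector>

// Sketch only, not the actual Spatializer implementation: the mask set counts as
// "immersive" as soon as any mask carries more than two channels, so mono and stereo
// inputs return false while 5.1, 7.1, 7.1.4 and 22.2 return true, matching the test.
static bool containsImmersiveChannelMaskSketch(
        const std::vector<audio_channel_mask_t>& channelMasks) {
    for (audio_channel_mask_t mask : channelMasks) {
        if (audio_channel_count_from_out_mask(mask) > 2) {
            return true;
        }
    }
    return false;
}
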
diff --git a/services/audiopolicy/tests/test_execution_tracer.cpp b/services/audiopolicy/tests/test_execution_tracer.cpp
new file mode 100644
index 0000000..09de4a1
--- /dev/null
+++ b/services/audiopolicy/tests/test_execution_tracer.cpp
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "TestExecutionTracer"
+
+#include "test_execution_tracer.h"
+
+#include <android-base/logging.h>
+
+void TestExecutionTracer::OnTestStart(const ::testing::TestInfo& test_info) {
+    TraceTestState("Started", test_info);
+}
+
+void TestExecutionTracer::OnTestEnd(const ::testing::TestInfo& test_info) {
+    TraceTestState("Finished", test_info);
+}
+
+void TestExecutionTracer::OnTestPartResult(const ::testing::TestPartResult& result) {
+    if (result.failed()) {
+        LOG(ERROR) << result;
+    } else {
+        LOG(INFO) << result;
+    }
+}
+
+// static
+void TestExecutionTracer::TraceTestState(const std::string& state,
+                                         const ::testing::TestInfo& test_info) {
+    LOG(INFO) << state << " " << test_info.test_suite_name() << "::" << test_info.name();
+}
diff --git a/services/audiopolicy/tests/test_execution_tracer.h b/services/audiopolicy/tests/test_execution_tracer.h
new file mode 100644
index 0000000..9031aaf
--- /dev/null
+++ b/services/audiopolicy/tests/test_execution_tracer.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <gtest/gtest.h>
+
+class TestExecutionTracer : public ::testing::EmptyTestEventListener {
+  public:
+    void OnTestStart(const ::testing::TestInfo& test_info) override;
+    void OnTestEnd(const ::testing::TestInfo& test_info) override;
+    void OnTestPartResult(const ::testing::TestPartResult& result) override;
+
+  private:
+    static void TraceTestState(const std::string& state, const ::testing::TestInfo& test_info);
+};
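
TestExecutionTracer only logs once it is attached to gtest's global listener list; the test binaries' main() is not part of this diff, so the registration below is a typical sketch using the standard gtest listener API rather than the exact wiring used by these targets:

#include <gtest/gtest.h>
#include "test_execution_tracer.h"

int main(int argc, char** argv) {
    ::testing::InitGoogleTest(&argc, argv);
    // gtest takes ownership of appended listeners and deletes them at shutdown.
    ::testing::UnitTest::GetInstance()->listeners().Append(new TestExecutionTracer());
    return RUN_ALL_TESTS();
}
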
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index daaeae6..3f2a617 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -58,6 +58,7 @@
         "libcamera_metadata",
         "libfmq",
         "libgui",
+        "libguiflags",
         "libhardware",
         "libhidlbase",
         "libimage_io",
@@ -72,6 +73,7 @@
         "libsensorprivacy",
         "libstagefright",
         "libstagefright_foundation",
+        "libvendorsupport",
         "libxml2",
         "libyuv",
         "android.companion.virtual.virtualdevice_aidl-cpp",
@@ -87,6 +89,7 @@
         "android.hardware.common-V2-ndk",
         "android.hardware.common.fmq-V1-ndk",
         "camera_platform_flags_c_lib",
+        "com.android.window.flags.window-aconfig_flags_c_lib",
         "media_permission-aidl-cpp",
     ],
 
@@ -167,6 +170,7 @@
         "device3/Camera3OutputStreamInterface.cpp",
         "device3/Camera3OutputUtils.cpp",
         "device3/Camera3DeviceInjectionMethods.cpp",
+        "device3/deprecated/DeprecatedCamera3StreamSplitter.cpp",
         "device3/UHRCropAndMeteringRegionMapper.cpp",
         "device3/PreviewFrameSpacer.cpp",
         "device3/hidl/HidlCamera3Device.cpp",
@@ -263,9 +267,21 @@
         "liblog",
         "libutils",
         "libxml2",
-        "camera_platform_flags_c_lib",
     ],
 
+    target: {
+        android: {
+            shared_libs: [
+                "camera_platform_flags_c_lib",
+            ],
+        },
+        host: {
+            shared_libs: [
+                "camera_platform_flags_c_lib_for_test",
+            ],
+        },
+    },
+
     include_dirs: [
         "frameworks/av/camera/include",
         "frameworks/av/camera/include/camera",
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index d9d8a3d..d21241b 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -22,13 +22,9 @@
 #include <utils/Trace.h>
 #include <cutils/properties.h>
 
-#include "camera/CameraMetadata.h"
 #include "CameraFlashlight.h"
-#include "gui/IGraphicBufferConsumer.h"
-#include "gui/BufferQueue.h"
+#include "camera/CameraMetadata.h"
 #include "camera/camera2/CaptureRequest.h"
-#include "device3/Camera3Device.h"
-
 
 namespace android {
 
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index b34c268..6a5d5e0 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -82,13 +82,13 @@
 #include "CameraService.h"
 #include "api1/Camera2Client.h"
 #include "api2/CameraDeviceClient.h"
-#include "utils/CameraTraces.h"
-#include "utils/TagMonitor.h"
 #include "utils/CameraServiceProxyWrapper.h"
+#include "utils/CameraTraces.h"
 #include "utils/SessionConfigurationUtils.h"
+#include "utils/TagMonitor.h"
+#include "utils/Utils.h"
 
 namespace {
-    const char* kPermissionServiceName = "permission";
     const char* kActivityServiceName = "activity";
     const char* kSensorPrivacyServiceName = "sensor_privacy";
     const char* kAppopsServiceName = "appops";
@@ -165,6 +165,8 @@
 const std::string CameraService::kOfflineDevice("offline-");
 const std::string CameraService::kWatchAllClientsFlag("all");
 
+constexpr int32_t kInvalidDeviceId = -1;
+
 // Set to keep track of logged service error events.
 static std::set<std::string> sServiceErrorEventSet;
 
@@ -339,25 +341,6 @@
                 mappedCameraId, deviceId);
         i->handleBinderStatus(ret, "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
                 __FUNCTION__, i->getListenerUid(), i->getListenerPid(), ret.exceptionCode());
-
-        // Only cameras of the default device can be remapped to a different camera (using
-        // remapCameraIds method), so do the following only if the camera is associated with the
-        // default device.
-        if (deviceId == kDefaultDeviceId) {
-            // For the default device, also trigger the torch callbacks for cameras that were
-            // remapped to the current cameraId for the specific package that this listener belongs
-            // to.
-            std::vector<std::string> remappedCameraIds =
-                    findOriginalIdsForRemappedCameraId(cameraId, i->getListenerUid());
-            for (auto &remappedCameraId: remappedCameraIds) {
-                ret = i->getListener()->onTorchStatusChanged(mapToInterface(status),
-                        remappedCameraId, kDefaultDeviceId);
-                i->handleBinderStatus(ret,
-                        "%s: Failed to trigger onTorchStatusChanged for %d:%d: %d",
-                        __FUNCTION__, i->getListenerUid(), i->getListenerPid(),
-                        ret.exceptionCode());
-            }
-        }
     }
 }
 
@@ -379,7 +362,8 @@
         if (vd_flags::camera_device_awareness()) {
             CameraMetadata cameraInfo;
             status_t res = mCameraProviderManager->getCameraCharacteristics(
-                    cameraId, false, &cameraInfo, false);
+                    cameraId, false, &cameraInfo,
+                    hardware::ICameraService::ROTATION_OVERRIDE_NONE);
             int32_t deviceId = kDefaultDeviceId;
             if (res != OK) {
                 ALOGW("%s: Not able to get camera characteristics for camera id %s",
@@ -442,8 +426,9 @@
         int facing = -1;
         int orientation = 0;
         int portraitRotation;
-        getDeviceVersion(cameraId, /*overrideToPortrait*/false, /*out*/&portraitRotation,
-                /*out*/&facing, /*out*/&orientation);
+        getDeviceVersion(cameraId,
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                /*out*/&portraitRotation, /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\" facing", __FUNCTION__, cameraId.c_str());
             return;
@@ -527,7 +512,7 @@
 void CameraService::onDeviceStatusChanged(const std::string& cameraId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, newStatus=%d", __FUNCTION__,
-            cameraId.c_str(), newHalStatus);
+            cameraId.c_str(), eToI(newHalStatus));
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
@@ -551,7 +536,8 @@
     StatusInternal oldStatus = state->getStatus();
 
     if (oldStatus == newStatus) {
-        ALOGE("%s: State transition to the same status %#x not allowed", __FUNCTION__, newStatus);
+        ALOGE("%s: State transition to the same status %#x not allowed", __FUNCTION__,
+                eToI(newStatus));
         return;
     }
 
@@ -594,7 +580,7 @@
         const std::string& physicalId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, physicalCameraId=%s, newStatus=%d",
-            __FUNCTION__, id.c_str(), physicalId.c_str(), newHalStatus);
+            __FUNCTION__, id.c_str(), physicalId.c_str(), eToI(newHalStatus));
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
@@ -610,7 +596,7 @@
     if (logicalCameraStatus != StatusInternal::PRESENT &&
             logicalCameraStatus != StatusInternal::NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
-                __FUNCTION__, physicalId.c_str(), newHalStatus, logicalCameraStatus);
+                __FUNCTION__, physicalId.c_str(), eToI(newHalStatus), eToI(logicalCameraStatus));
         return;
     }
 
@@ -702,7 +688,7 @@
 void CameraService::onTorchStatusChangedLocked(const std::string& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
-            __FUNCTION__, cameraId.c_str(), newStatus);
+            __FUNCTION__, cameraId.c_str(), eToI(newStatus));
 
     TorchModeStatus status;
     status_t res = getTorchStatusLocked(cameraId, &status);
@@ -775,7 +761,7 @@
 
     CameraMetadata cameraInfo;
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cam_id, false, &cameraInfo, false);
+            cam_id, false, &cameraInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
     if (res != OK){
         ALOGE("%s: Not able to get camera characteristics for camera id %s",__FUNCTION__,
                 cam_id.c_str());
@@ -797,12 +783,13 @@
     return true;
 }
 
-Status CameraService::getNumberOfCameras(int32_t type, int32_t deviceId, int32_t devicePolicy,
+Status CameraService::getNumberOfCameras(int32_t type,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         int32_t* numCameras) {
     ATRACE_CALL();
-    if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+    if (vd_flags::camera_device_awareness() && (clientAttribution.deviceId != kDefaultDeviceId)
             && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
-        *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(deviceId);
+        *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(clientAttribution.deviceId);
         return Status::ok();
     }
 
@@ -834,45 +821,23 @@
     return Status::ok();
 }
 
-Status CameraService::remapCameraIds(const hardware::CameraIdRemapping& cameraIdRemapping) {
-    if (!checkCallingPermission(toString16(sCameraInjectExternalCameraPermission))) {
-        const int pid = getCallingPid();
-        const int uid = getCallingUid();
-        ALOGE("%s: Permission Denial: can't configure camera ID mapping pid=%d, uid=%d",
-                __FUNCTION__, pid, uid);
-        return STATUS_ERROR(ERROR_PERMISSION_DENIED,
-                "Permission Denial: no permission to configure camera id mapping");
-    }
-    TCameraIdRemapping cameraIdRemappingMap{};
-    binder::Status parseStatus = parseCameraIdRemapping(cameraIdRemapping, &cameraIdRemappingMap);
-    if (!parseStatus.isOk()) {
-        return parseStatus;
-    }
-    remapCameraIds(cameraIdRemappingMap);
-    return Status::ok();
-}
-
 Status CameraService::createDefaultRequest(const std::string& unresolvedCameraId, int templateId,
-        int32_t deviceId, int32_t devicePolicy,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /* out */
         hardware::camera2::impl::CameraMetadataNative* request) {
     ATRACE_CALL();
 
-    if (!flags::feature_combination_query()) {
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
-                "Camera subsystem doesn't support this method!");
-    }
     if (!mInitialized) {
         ALOGE("%s: Camera subsystem is not available", __FUNCTION__);
         logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, getCallingUid());
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -917,25 +882,21 @@
 Status CameraService::isSessionConfigurationWithParametersSupported(
         const std::string& unresolvedCameraId, int targetSdkVersion,
         const SessionConfiguration& sessionConfiguration,
-        int32_t deviceId, int32_t devicePolicy,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/ bool* supported) {
     ATRACE_CALL();
 
-    if (!flags::feature_combination_query()) {
-        return STATUS_ERROR(CameraService::ERROR_INVALID_OPERATION,
-                "Camera subsystem doesn't support this method!");
-    }
     if (!mInitialized) {
         ALOGE("%s: Camera HAL couldn't be initialized", __FUNCTION__);
         logServiceError("Camera subsystem is not available", ERROR_DISCONNECTED);
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, getCallingUid());
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -954,12 +915,16 @@
                 cameraId.c_str());
     }
 
-    bool overrideForPerfClass = flags::calculate_perf_override_during_session_support() &&
-                                SessionConfigurationUtils::targetPerfClassPrimaryCamera(
+    bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                                         mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
 
-    return isSessionConfigurationWithParametersSupportedUnsafe(cameraId, sessionConfiguration,
-                                                               overrideForPerfClass, supported);
+    auto ret = isSessionConfigurationWithParametersSupportedUnsafe(cameraId,
+            sessionConfiguration, overrideForPerfClass, supported);
+    if (flags::analytics_24q3()) {
+        mCameraServiceProxyWrapper->logFeatureCombinationQuery(cameraId,
+                getCallingUid(), sessionConfiguration, ret);
+    }
+    return ret;
 }
 
 Status CameraService::isSessionConfigurationWithParametersSupportedUnsafe(
@@ -1008,8 +973,9 @@
 }
 
 Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
-        int targetSdkVersion, bool overrideToPortrait,
-        const SessionConfiguration& sessionConfiguration, int32_t deviceId, int32_t devicePolicy,
+        int targetSdkVersion, int rotationOverride,
+        const SessionConfiguration& sessionConfiguration,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/ CameraMetadata* outMetadata) {
     ATRACE_CALL();
 
@@ -1026,11 +992,11 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-                                                                  devicePolicy, getCallingUid());
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                                       unresolvedCameraId.c_str(), deviceId);
+                                       unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1045,27 +1011,27 @@
 
     bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
             mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
-    if (flags::check_session_support_before_session_char()) {
-        bool sessionConfigSupported;
-        Status res = isSessionConfigurationWithParametersSupportedUnsafe(
-                cameraId, sessionConfiguration, overrideForPerfClass, &sessionConfigSupported);
-        if (!res.isOk()) {
-            // isSessionConfigurationWithParametersSupportedUnsafe should log what went wrong and
-            // report the correct Status to send to the client. Simply forward the error to
-            // the client.
-            outMetadata->clear();
-            return res;
-        }
-        if (!sessionConfigSupported) {
-            std::string msg = fmt::sprintf(
-                    "Session configuration not supported for camera device %s.", cameraId.c_str());
-            outMetadata->clear();
-            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
-        }
+
+    bool sessionConfigSupported;
+    Status res = isSessionConfigurationWithParametersSupportedUnsafe(
+            cameraId, sessionConfiguration, overrideForPerfClass, &sessionConfigSupported);
+    if (!res.isOk()) {
+        // isSessionConfigurationWithParametersSupportedUnsafe should log what went wrong and
+        // report the correct Status to send to the client. Simply forward the error to
+        // the client.
+        outMetadata->clear();
+        return res;
+    }
+
+    if (!sessionConfigSupported) {
+        std::string msg = fmt::sprintf("Session configuration not supported for camera device %s.",
+                                       cameraId.c_str());
+        outMetadata->clear();
+        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     status_t ret = mCameraProviderManager->getSessionCharacteristics(
-            cameraId, sessionConfiguration, overrideForPerfClass, overrideToPortrait, outMetadata);
+            cameraId, sessionConfiguration, overrideForPerfClass, rotationOverride, outMetadata);
 
     switch (ret) {
         case OK:
@@ -1103,7 +1069,12 @@
             }
     }
 
-    return filterSensitiveMetadataIfNeeded(cameraId, outMetadata);
+    res = filterSensitiveMetadataIfNeeded(cameraId, outMetadata);
+    if (flags::analytics_24q3()) {
+        mCameraServiceProxyWrapper->logSessionCharacteristicsQuery(cameraId,
+                getCallingUid(), sessionConfiguration, res);
+    }
+    return res;
 }
 
 Status CameraService::filterSensitiveMetadataIfNeeded(
@@ -1166,120 +1137,6 @@
     return Status::ok();
 }
 
-Status CameraService::parseCameraIdRemapping(
-        const hardware::CameraIdRemapping& cameraIdRemapping,
-        /* out */ TCameraIdRemapping* cameraIdRemappingMap) {
-    std::string packageName;
-    std::string cameraIdToReplace, updatedCameraId;
-    for (const auto& packageIdRemapping: cameraIdRemapping.packageIdRemappings) {
-        packageName = packageIdRemapping.packageName;
-        if (packageName.empty()) {
-            return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
-                    "CameraIdRemapping: Package name cannot be empty");
-        }
-        if (packageIdRemapping.cameraIdsToReplace.size()
-            != packageIdRemapping.updatedCameraIds.size()) {
-            return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                    "CameraIdRemapping: Mismatch in CameraId Remapping lists sizes for package %s",
-                    packageName.c_str());
-        }
-        for (size_t i = 0; i < packageIdRemapping.cameraIdsToReplace.size(); i++) {
-            cameraIdToReplace = packageIdRemapping.cameraIdsToReplace[i];
-            updatedCameraId = packageIdRemapping.updatedCameraIds[i];
-            if (cameraIdToReplace.empty() || updatedCameraId.empty()) {
-                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: Camera Id cannot be empty for package %s",
-                        packageName.c_str());
-            }
-            if (cameraIdToReplace == updatedCameraId) {
-                return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: CameraIdToReplace cannot be the same"
-                        " as updatedCameraId for %s",
-                        packageName.c_str());
-            }
-
-            // Do not allow any camera remapping that involves a virtual camera.
-            auto [deviceIdForCameraToReplace, _] =
-                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(
-                            cameraIdToReplace);
-            if (deviceIdForCameraToReplace != kDefaultDeviceId) {
-                return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: CameraIdToReplace cannot be a virtual camera");
-            }
-            [[maybe_unused]] auto [deviceIdForUpdatedCamera, unusedMappedCameraId] =
-                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(updatedCameraId);
-            if (deviceIdForUpdatedCamera != kDefaultDeviceId) {
-                return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT,
-                        "CameraIdRemapping: UpdatedCameraId cannot be a virtual camera");
-            }
-
-            (*cameraIdRemappingMap)[packageName][cameraIdToReplace] = updatedCameraId;
-        }
-    }
-    return Status::ok();
-}
-
-void CameraService::remapCameraIds(const TCameraIdRemapping& cameraIdRemapping) {
-    // Acquire mServiceLock and prevent other clients from connecting
-    std::unique_ptr<AutoConditionLock> serviceLockWrapper =
-            AutoConditionLock::waitAndAcquire(mServiceLockWrapper);
-
-    // Collect all existing clients for camera Ids that are being
-    // remapped in the new cameraIdRemapping, but only if they were being used by a
-    // targeted packageName.
-    std::vector<sp<BasicClient>> clientsToDisconnect;
-    std::vector<std::string> cameraIdsToUpdate;
-    for (const auto& [packageName, injectionMap] : cameraIdRemapping) {
-        for (auto& [id0, id1] : injectionMap) {
-            ALOGI("%s: UPDATE:= %s: %s: %s", __FUNCTION__, packageName.c_str(),
-                    id0.c_str(), id1.c_str());
-            auto clientDescriptor = mActiveClientManager.get(id0);
-            if (clientDescriptor != nullptr) {
-                sp<BasicClient> clientSp = clientDescriptor->getValue();
-                if (clientSp->getPackageName() == packageName) {
-                    // This camera is being used by a targeted packageName and
-                    // being remapped to a new camera Id. We should disconnect it.
-                    clientsToDisconnect.push_back(clientSp);
-                    cameraIdsToUpdate.push_back(id0);
-                }
-            }
-        }
-    }
-
-    for (auto& clientSp : clientsToDisconnect) {
-        // Notify the clients about the disconnection.
-        clientSp->notifyError(hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_DISCONNECTED,
-                CaptureResultExtras{});
-    }
-
-    // Do not hold mServiceLock while disconnecting clients, but retain the condition
-    // blocking other clients from connecting in mServiceLockWrapper if held.
-    mServiceLock.unlock();
-
-    // Clear calling identity for disconnect() PID checks.
-    int64_t token = clearCallingIdentity();
-
-    // Disconnect clients.
-    for (auto& clientSp : clientsToDisconnect) {
-        // This also triggers a call to updateStatus() which also reads mCameraIdRemapping
-        // and requires mCameraIdRemappingLock.
-        clientSp->disconnect();
-    }
-
-    // Invoke destructors (which call disconnect()) now while we don't hold the mServiceLock.
-    clientsToDisconnect.clear();
-
-    restoreCallingIdentity(token);
-    mServiceLock.lock();
-
-    {
-        Mutex::Autolock lock(mCameraIdRemappingLock);
-        // Update mCameraIdRemapping.
-        mCameraIdRemapping.clear();
-        mCameraIdRemapping.insert(cameraIdRemapping.begin(), cameraIdRemapping.end());
-    }
-}
-
 Status CameraService::injectSessionParams(
         const std::string& cameraId,
         const CameraMetadata& sessionParams) {
@@ -1320,56 +1177,10 @@
     return Status::ok();
 }
 
-std::vector<std::string> CameraService::findOriginalIdsForRemappedCameraId(
-    const std::string& inputCameraId, int clientUid) {
-    std::string packageName = getPackageNameFromUid(clientUid);
-    std::vector<std::string> cameraIds;
-    Mutex::Autolock lock(mCameraIdRemappingLock);
-    if (auto packageMapIter = mCameraIdRemapping.find(packageName);
-        packageMapIter != mCameraIdRemapping.end()) {
-        for (auto& [id0, id1]: packageMapIter->second) {
-            if (id1 == inputCameraId) {
-                cameraIds.push_back(id0);
-            }
-        }
-    }
-    return cameraIds;
-}
-
-std::string CameraService::resolveCameraId(
-    const std::string& inputCameraId,
-    int clientUid,
-    const std::string& packageName) {
-    std::string packageNameVal = packageName;
-    if (packageName.empty()) {
-        packageNameVal = getPackageNameFromUid(clientUid);
-    }
-    if (clientUid < AID_APP_START || packageNameVal.empty()) {
-        // We shouldn't remap cameras for processes with system/vendor UIDs.
-        return inputCameraId;
-    }
-    Mutex::Autolock lock(mCameraIdRemappingLock);
-    if (auto packageMapIter = mCameraIdRemapping.find(packageNameVal);
-        packageMapIter != mCameraIdRemapping.end()) {
-        auto packageMap = packageMapIter->second;
-        if (auto replacementIdIter = packageMap.find(inputCameraId);
-            replacementIdIter != packageMap.end()) {
-            ALOGI("%s: resolveCameraId: remapping cameraId %s for %s to %s",
-                    __FUNCTION__, inputCameraId.c_str(),
-                    packageNameVal.c_str(),
-                    replacementIdIter->second.c_str());
-            return replacementIdIter->second;
-        }
-    }
-    return inputCameraId;
-}
-
 std::optional<std::string> CameraService::resolveCameraId(
         const std::string& inputCameraId,
         int32_t deviceId,
-        int32_t devicePolicy,
-        int clientUid,
-        const std::string& packageName) {
+        int32_t devicePolicy) {
     if ((deviceId == kDefaultDeviceId)
             || (devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
         auto [storedDeviceId, _] =
@@ -1381,20 +1192,22 @@
             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             return std::nullopt;
         }
-        return resolveCameraId(inputCameraId, clientUid, packageName);
+        return inputCameraId;
     }
 
     return mVirtualDeviceCameraIdMapper.getActualCameraId(deviceId, inputCameraId);
 }
 
-Status CameraService::getCameraInfo(int cameraId, bool overrideToPortrait, int32_t deviceId,
-        int32_t devicePolicy, CameraInfo* cameraInfo) {
+Status CameraService::getCameraInfo(int cameraId, int rotationOverride,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
-    std::string cameraIdStr = cameraIdIntToStrLocked(cameraId, deviceId, devicePolicy);
+    std::string cameraIdStr =
+            cameraIdIntToStrLocked(cameraId, clientAttribution.deviceId, devicePolicy);
     if (cameraIdStr.empty()) {
         std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
-                cameraId, deviceId);
+                cameraId, clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1423,7 +1236,7 @@
     Status ret = Status::ok();
     int portraitRotation;
     status_t err = mCameraProviderManager->getCameraInfo(
-            cameraIdStr, overrideToPortrait, &portraitRotation, cameraInfo);
+            cameraIdStr, rotationOverride, &portraitRotation, cameraInfo);
     if (err != OK) {
         ret = STATUS_ERROR_FMT(ERROR_INVALID_OPERATION,
                 "Error retrieving camera info from device %d: %s (%d)", cameraId,
@@ -1458,8 +1271,7 @@
         return std::string{};
     }
 
-    std::string unresolvedCameraId = (*cameraIds)[cameraIdInt];
-    return resolveCameraId(unresolvedCameraId, getCallingUid());
+    return (*cameraIds)[cameraIdInt];
 }
 
 std::string CameraService::cameraIdIntToStr(int cameraIdInt, int32_t deviceId,
@@ -1469,8 +1281,8 @@
 }
 
 Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
-        int targetSdkVersion, bool overrideToPortrait, int32_t deviceId, int32_t devicePolicy,
-        CameraMetadata* cameraInfo) {
+        int targetSdkVersion, int rotationOverride, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
 
     if (!cameraInfo) {
@@ -1485,11 +1297,11 @@
                 "Camera subsystem is not available");;
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, getCallingUid());
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1504,7 +1316,7 @@
             SessionConfigurationUtils::targetPerfClassPrimaryCamera(mPerfClassPrimaryCameraIds,
                     cameraId, targetSdkVersion);
     status_t res = mCameraProviderManager->getCameraCharacteristics(
-            cameraId, overrideForPerfClass, cameraInfo, overrideToPortrait);
+            cameraId, overrideForPerfClass, cameraInfo, rotationOverride);
     if (res != OK) {
         if (res == NAME_NOT_FOUND) {
             return STATUS_ERROR_FMT(ERROR_ILLEGAL_ARGUMENT, "Unable to retrieve camera "
@@ -1522,16 +1334,17 @@
     return filterSensitiveMetadataIfNeeded(cameraId, cameraInfo);
 }
 
-Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId, int32_t deviceId,
+Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId,
+        const AttributionSourceState& clientAttribution,
         int32_t devicePolicy, int32_t* torchStrength) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, getCallingUid());
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1600,7 +1413,8 @@
 }
 
 std::pair<int, IPCTransport> CameraService::getDeviceVersion(const std::string& cameraId,
-        bool overrideToPortrait, int* portraitRotation, int* facing, int* orientation) {
+        int rotationOverride, int* portraitRotation, int* facing,
+        int* orientation) {
     ATRACE_CALL();
 
     int deviceVersion = 0;
@@ -1618,7 +1432,7 @@
 
     hardware::CameraInfo info;
     if (facing) {
-        res = mCameraProviderManager->getCameraInfo(cameraId, overrideToPortrait,
+        res = mCameraProviderManager->getCameraInfo(cameraId, rotationOverride,
                 portraitRotation, &info);
         if (res != OK) {
             return std::make_pair(-1, IPCTransport::INVALID);
@@ -1654,7 +1468,7 @@
         const std::optional<std::string>& featureId,  const std::string& cameraId,
         int api1CameraId, int facing, int sensorOrientation, int clientPid, uid_t clientUid,
         int servicePid, std::pair<int, IPCTransport> deviceVersionAndTransport,
-        apiLevel effectiveApiLevel, bool overrideForPerfClass, bool overrideToPortrait,
+        apiLevel effectiveApiLevel, bool overrideForPerfClass, int rotationOverride,
         bool forceSlowJpegMode, const std::string& originalCameraId,
         /*out*/sp<BasicClient>* client) {
     // For HIDL devices
@@ -1690,10 +1504,10 @@
         *client = new Camera2Client(cameraService, tmp, cameraService->mCameraServiceProxyWrapper,
                 cameraService->mAttributionAndPermissionUtils, packageName, featureId, cameraId,
                 api1CameraId, facing, sensorOrientation,
-                clientPid, clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                clientPid, clientUid, servicePid, overrideForPerfClass, rotationOverride,
                 forceSlowJpegMode);
-        ALOGI("%s: Camera1 API (legacy), override to portrait %d, forceSlowJpegMode %d",
-                __FUNCTION__, overrideToPortrait, forceSlowJpegMode);
+        ALOGI("%s: Camera1 API (legacy), rotationOverride %d, forceSlowJpegMode %d",
+                __FUNCTION__, rotationOverride, forceSlowJpegMode);
     } else { // Camera2 API route
         sp<hardware::camera2::ICameraDeviceCallbacks> tmp =
                 static_cast<hardware::camera2::ICameraDeviceCallbacks*>(cameraCb.get());
@@ -1701,8 +1515,8 @@
                 cameraService->mCameraServiceProxyWrapper,
                 cameraService->mAttributionAndPermissionUtils, packageName, systemNativeClient,
                 featureId, cameraId, facing, sensorOrientation, clientPid, clientUid, servicePid,
-                overrideForPerfClass, overrideToPortrait, originalCameraId);
-        ALOGI("%s: Camera2 API, override to portrait %d", __FUNCTION__, overrideToPortrait);
+                overrideForPerfClass, rotationOverride, originalCameraId);
+        ALOGI("%s: Camera2 API, rotationOverride %d", __FUNCTION__, rotationOverride);
     }
     return Status::ok();
 }
@@ -1718,7 +1532,7 @@
             s << ", " << std::to_string(i);
         }
     }
-    return std::move(s.str());
+    return s.str();
 }
 
 int32_t CameraService::mapToInterface(TorchModeStatus status) {
@@ -1734,7 +1548,7 @@
             serviceStatus = ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON;
             break;
         default:
-            ALOGW("Unknown new flash status: %d", status);
+            ALOGW("Unknown new flash status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -1752,7 +1566,7 @@
             serviceStatus = StatusInternal::ENUMERATING;
             break;
         default:
-            ALOGW("Unknown new HAL device status: %d", status);
+            ALOGW("Unknown new HAL device status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -1776,7 +1590,7 @@
             serviceStatus = ICameraServiceListener::STATUS_UNKNOWN;
             break;
         default:
-            ALOGW("Unknown new internal device status: %d", status);
+            ALOGW("Unknown new internal device status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -1787,12 +1601,17 @@
     std::string cameraIdStr = std::to_string(cameraId);
     Status ret = Status::ok();
     sp<Client> tmp = nullptr;
+
+    int callingPid = getCallingPid();
+    logConnectionAttempt(callingPid, kServiceName, cameraIdStr, API_1);
+
     if (!(ret = connectHelper<ICameraClient,Client>(
             sp<ICameraClient>{nullptr}, cameraIdStr, cameraId,
-            kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
+            kServiceName, /*systemNativeClient*/ false, {}, uid, callingPid,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
-            /*targetSdkVersion*/ __ANDROID_API_FUTURE__, /*overrideToPortrait*/ true,
-            /*forceSlowJpegMode*/false, cameraIdStr, /*out*/ tmp)
+            /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
+            /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+            /*forceSlowJpegMode*/false, cameraIdStr, /*isNonSystemNdk*/ false, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
     }
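
Across this file the former overrideToPortrait boolean becomes an integer rotationOverride taking ICameraService constants. Based only on the two values that appear in this diff, the legacy flag maps as in the sketch below (assuming the generated ICameraService binder header is in scope, as it is in CameraService.cpp):

// Hedged sketch of the bool -> rotationOverride migration; no ROTATION_OVERRIDE_*
// values are assumed beyond the two used in this change.
static int32_t rotationOverrideFromLegacyFlag(bool overrideToPortrait) {
    return overrideToPortrait
            ? android::hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT
            : android::hardware::ICameraService::ROTATION_OVERRIDE_NONE;
}
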
@@ -1812,9 +1631,7 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "Parameters must not be null");
     }
 
-    std::string unresolvedCameraId = std::to_string(cameraId);
-    std::string cameraIdStr = resolveCameraId(unresolvedCameraId,
-            getCallingUid());
+    std::string cameraIdStr = std::to_string(cameraId);
 
     // Check if we already have parameters
     {
@@ -1864,17 +1681,14 @@
 }
 
 Status CameraService::validateConnectLocked(const std::string& cameraId,
-        const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid,
-        /*out*/int& originalClientPid) const {
+        const std::string& clientName8, int clientUid, int clientPid) const {
 
 #ifdef __BRILLO__
     UNUSED(clientName8);
     UNUSED(clientUid);
     UNUSED(clientPid);
-    UNUSED(originalClientPid);
 #else
-    Status allowed = validateClientPermissionsLocked(cameraId, clientName8, clientUid, clientPid,
-            originalClientPid);
+    Status allowed = validateClientPermissionsLocked(cameraId, clientName8, clientUid, clientPid);
     if (!allowed.isOk()) {
         return allowed;
     }
@@ -1911,37 +1725,24 @@
     return Status::ok();
 }
 
-Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
-        const std::string& clientName, int& clientUid, int& clientPid,
-        /*out*/int& originalClientPid) const {
+Status CameraService::errorNotTrusted(int clientPid, int clientUid, const std::string& cameraId,
+        const std::string& clientName, bool isPid) const {
     int callingPid = getCallingPid();
     int callingUid = getCallingUid();
+    ALOGE("CameraService::connect X (calling PID %d, calling UID %d) rejected "
+            "(don't trust %s %d)", callingPid, callingUid, isPid ? "clientPid" : "clientUid",
+            isPid ? clientPid : clientUid);
+    return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
+            "Untrusted caller (calling PID %d, UID %d) trying to "
+            "forward camera access to camera %s for client %s (PID %d, UID %d)",
+            getCallingPid(), getCallingUid(), cameraId.c_str(),
+            clientName.c_str(), clientPid, clientUid);
+}
 
-    // Check if we can trust clientUid
-    if (clientUid == USE_CALLING_UID) {
-        clientUid = callingUid;
-    } else if (!isTrustedCallingUid(callingUid)) {
-        ALOGE("CameraService::connect X (calling PID %d, calling UID %d) rejected "
-                "(don't trust clientUid %d)", callingPid, callingUid, clientUid);
-        return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                "Untrusted caller (calling PID %d, UID %d) trying to "
-                "forward camera access to camera %s for client %s (PID %d, UID %d)",
-                callingPid, callingUid, cameraId.c_str(),
-                clientName.c_str(), clientPid, clientUid);
-    }
-
-    // Check if we can trust clientPid
-    if (clientPid == USE_CALLING_PID) {
-        clientPid = callingPid;
-    } else if (!isTrustedCallingUid(callingUid)) {
-        ALOGE("CameraService::connect X (calling PID %d, calling UID %d) rejected "
-                "(don't trust clientPid %d)", callingPid, callingUid, clientPid);
-        return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
-                "Untrusted caller (calling PID %d, UID %d) trying to "
-                "forward camera access to camera %s for client %s (PID %d, UID %d)",
-                callingPid, callingUid, cameraId.c_str(),
-                clientName.c_str(), clientPid, clientUid);
-    }
+Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
+        const std::string& clientName, int clientUid, int clientPid) const {
+    int callingPid = getCallingPid();
+    int callingUid = getCallingUid();
 
     if (shouldRejectSystemCameraConnection(cameraId)) {
         ALOGW("Attempting to connect to system-only camera id %s, connection rejected",
@@ -1996,12 +1797,10 @@
                 "is enabled", clientName.c_str(), clientPid, clientUid, cameraId.c_str());
     }
 
+    userid_t clientUserId = multiuser_get_user_id(clientUid);
+
     // Only use passed in clientPid to check permission. Use calling PID as the client PID that's
     // connected to camera service directly.
-    originalClientPid = clientPid;
-    clientPid = callingPid;
-
-    userid_t clientUserId = multiuser_get_user_id(clientUid);
 
     // For non-system clients : Only allow clients who are being used by the current foreground
     // device user, unless calling from our own process.
@@ -2022,11 +1821,11 @@
         if (isHeadlessSystemUserMode()
                 && (clientUserId == USER_SYSTEM)
                 && !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
-            ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
+            ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", callingPid, clientUid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                     "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" as Headless System \
                     User without camera headless system user permission",
-                    clientName.c_str(), clientPid, clientUid, cameraId.c_str());
+                    clientName.c_str(), callingPid, clientUid, cameraId.c_str());
         }
     }
 
@@ -2311,35 +2110,59 @@
 Status CameraService::connect(
         const sp<ICameraClient>& cameraClient,
         int api1CameraId,
-        const std::string& clientPackageName,
-        int clientUid,
-        int clientPid,
         int targetSdkVersion,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool forceSlowJpegMode,
-        int32_t deviceId,
+        const AttributionSourceState& clientAttribution,
         int32_t devicePolicy,
         /*out*/
         sp<ICamera>* device) {
     ATRACE_CALL();
     Status ret = Status::ok();
 
-    std::string cameraIdStr = cameraIdIntToStr(api1CameraId, deviceId, devicePolicy);
+    std::string cameraIdStr =
+            cameraIdIntToStr(api1CameraId, clientAttribution.deviceId, devicePolicy);
     if (cameraIdStr.empty()) {
         std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
-                api1CameraId, deviceId);
+                api1CameraId, clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
+    std::string clientPackageNameMaybe = clientAttribution.packageName.value_or("");
+    bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
+    std::string clientPackageName = resolvePackageName(clientAttribution.uid,
+            clientPackageNameMaybe);
+    logConnectionAttempt(clientAttribution.pid, clientPackageName, cameraIdStr, API_1);
+
+    int clientUid = clientAttribution.uid;
+    int clientPid = clientAttribution.pid;
+
+    // Resolve the client identity. In the near future, we will no longer rely on USE_CALLING_*, and
+    // need a way to guarantee the caller identity early.
+
+    // Check if we can trust clientUid
+    if (!resolveClientUid(clientUid)) {
+        return errorNotTrusted(clientPid, clientUid, cameraIdStr, clientPackageName,
+                /* isPid=*/ false);
+    }
+
+    // Check if we can trust clientPid
+    if (!resolveClientPid(clientPid)) {
+        return errorNotTrusted(clientPid, clientUid, cameraIdStr, clientPackageName,
+                /* isPid= */ true);
+    }
+
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
-            clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
+            clientPackageName, /*systemNativeClient*/ false, {},
+            clientUid, clientPid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            overrideToPortrait, forceSlowJpegMode, cameraIdStr, /*out*/client);
+            rotationOverride, forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*out*/client);
 
     if (!ret.isOk()) {
-        logRejected(cameraIdStr, getCallingPid(), clientPackageName, toStdString(ret.toString8()));
+        logRejected(cameraIdStr, getCallingPid(), clientAttribution.packageName.value_or(""),
+                toStdString(ret.toString8()));
         return ret;
     }
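
connect() above, like the other reworked entry points, now receives the caller identity as a single AttributionSourceState instead of separate package/uid/pid/deviceId arguments. A minimal sketch of how a caller could populate it, limited to the fields this diff actually reads (uid, pid, packageName, deviceId); the header path and exact field types are assumptions for illustration:

#include <android/content/AttributionSourceState.h>
#include <string>

using android::content::AttributionSourceState;

// Illustrative only. USE_CALLING_UID / USE_CALLING_PID let CameraService substitute the
// Binder calling identity, and an empty packageName marks a non-system native (NDK) client.
AttributionSourceState makeClientAttribution(int uid, int pid,
                                             const std::string& packageName,
                                             int32_t deviceId) {
    AttributionSourceState attribution;
    attribution.uid = uid;
    attribution.pid = pid;
    attribution.packageName = packageName;
    attribution.deviceId = deviceId;
    return attribution;
}
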
 
@@ -2416,49 +2239,64 @@
 Status CameraService::connectDevice(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
         const std::string& unresolvedCameraId,
-        const std::string& clientPackageName,
-        const std::optional<std::string>& clientFeatureId,
-        int clientUid, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, int32_t deviceId, int32_t devicePolicy,
+        int oomScoreOffset, int targetSdkVersion,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
     ATRACE_CALL();
+    RunThreadWithRealtimePriority priorityBump;
     Status ret = Status::ok();
     sp<CameraDeviceClient> client = nullptr;
-    std::string clientPackageNameAdj = clientPackageName;
+    std::string clientPackageNameMaybe = clientAttribution.packageName.value_or("");
     int callingPid = getCallingPid();
     int callingUid = getCallingUid();
     bool systemNativeClient = false;
-    if (callerHasSystemUid() && (clientPackageNameAdj.size() == 0)) {
+    if (callerHasSystemUid() && (clientPackageNameMaybe.size() == 0)) {
         std::string systemClient = fmt::sprintf("client.pid<%d>", callingPid);
-        clientPackageNameAdj = systemClient;
+        clientPackageNameMaybe = systemClient;
         systemNativeClient = true;
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, callingUid, clientPackageNameAdj);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     std::string cameraId = cameraIdOptional.value();
 
+    bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
+    std::string clientPackageName = resolvePackageName(clientAttribution.uid,
+            clientPackageNameMaybe);
+    logConnectionAttempt(clientAttribution.pid, clientPackageName, cameraId, API_2);
+
+    userid_t clientUserId = multiuser_get_user_id(clientAttribution.uid);
+    if (clientAttribution.uid == USE_CALLING_UID) {
+        clientUserId = multiuser_get_user_id(callingUid);
+    }
+
+    // Resolve the client identity. In the near future, we will no longer rely on USE_CALLING_*,
+    // and will need a way to guarantee the caller identity early.
+
+    int clientUid = clientAttribution.uid;
+    int clientPid = callingPid;
+    // Check if we can trust clientUid
+    if (!resolveClientUid(clientUid)) {
+        return errorNotTrusted(clientPid, clientUid, cameraId, clientPackageName,
+                /* isPid= */ false);
+    }
+
     if (oomScoreOffset < 0) {
         std::string msg =
                 fmt::sprintf("Cannot increase the priority of a client %s pid %d for "
-                        "camera id %s", clientPackageNameAdj.c_str(), callingPid,
+                        "camera id %s", clientPackageName.c_str(), callingPid,
                         cameraId.c_str());
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
-    userid_t clientUserId = multiuser_get_user_id(clientUid);
-    if (clientUid == USE_CALLING_UID) {
-        clientUserId = multiuser_get_user_id(callingUid);
-    }
-
     // Automotive privileged client AID_AUTOMOTIVE_EVS using exterior system camera for use cases
     // such as rear view and surround view cannot be disabled.
     if ((!isAutomotivePrivilegedClient(callingUid) || !isAutomotiveExteriorSystemCamera(cameraId))
@@ -2475,19 +2313,19 @@
             && !isTrustedCallingUid(callingUid)) {
         std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
-                        clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
+                        clientPackageName.c_str(), callingPid, cameraId.c_str());
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(ERROR_PERMISSION_DENIED, msg.c_str());
     }
 
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
-            cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
-            clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, overrideToPortrait, /*forceSlowJpegMode*/false, unresolvedCameraId,
-            /*out*/client);
+            cameraId, /*api1CameraId*/-1, clientPackageName, systemNativeClient,
+            clientAttribution.attributionTag, clientUid, clientPid, API_2,
+            /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
+            /*forceSlowJpegMode*/false, unresolvedCameraId, isNonSystemNdk, /*out*/client);
 
     if (!ret.isOk()) {
-        logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
+        logRejected(cameraId, callingPid, clientPackageName, toStdString(ret.toString8()));
         return ret;
     }
 
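connectDevice() and the connectHelper() call above now take an integer rotationOverride instead of the old overrideToPortrait flag. A small sketch, assuming a hypothetical portraitOverrideRequested condition at the call site, of mapping between the two using the constants referenced elsewhere in this patch:

    // Hypothetical call-site mapping from the legacy boolean to the new constants.
    int rotationOverride = portraitOverrideRequested
            ? hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT
            : hardware::ICameraService::ROTATION_OVERRIDE_NONE;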
@@ -2547,83 +2385,44 @@
     return false;
 }
 
-std::string CameraService::getPackageNameFromUid(int clientUid) {
-    std::string packageName("");
-
-    sp<IPermissionController> permCtrl;
-    if (flags::cache_permission_services()) {
-        permCtrl = getPermissionController();
-    } else {
-        sp<IServiceManager> sm = defaultServiceManager();
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wdeprecated-declarations"
-        // Using deprecated function to preserve functionality until the
-        // cache_permission_services flag is removed.
-        sp<IBinder> binder = sm->getService(toString16(kPermissionServiceName));
-#pragma clang diagnostic pop
-        if (binder == 0) {
-            ALOGE("Cannot get permission service");
-            permCtrl = nullptr;
-        } else {
-            permCtrl = interface_cast<IPermissionController>(binder);
-        }
-    }
-
-    if (permCtrl == nullptr) {
-        // Return empty package name and the further interaction
-        // with camera will likely fail
-        return packageName;
-    }
-
-    Vector<String16> packages;
-
-    permCtrl->getPackagesForUid(clientUid, packages);
-
-    if (packages.isEmpty()) {
-        ALOGE("No packages for calling UID %d", clientUid);
-        // Return empty package name and the further interaction
-        // with camera will likely fail
-        return packageName;
-    }
-
-    // Arbitrarily pick the first name in the list
-    packageName = toStdString(packages[0]);
-
-    return packageName;
-}
-
-template<class CALLBACK, class CLIENT>
-Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
-        int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
-        const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
-        apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
-        /*out*/sp<CLIENT>& device) {
-    binder::Status ret = binder::Status::ok();
-
-    bool isNonSystemNdk = false;
-    std::string clientPackageName;
-    int packageUid = (clientUid == USE_CALLING_UID) ?
-            getCallingUid() : clientUid;
-    if (clientPackageNameMaybe.size() <= 0) {
-        // NDK calls don't come with package names, but we need one for various cases.
-        // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
-        // do exist. For all authentication cases, all packages under the same UID get the
-        // same permissions, so picking any associated package name is sufficient. For some
-        // other cases, this may give inaccurate names for clients in logs.
-        isNonSystemNdk = true;
-        clientPackageName = getPackageNameFromUid(packageUid);
-    } else {
-        clientPackageName = clientPackageNameMaybe;
-    }
-
-    int originalClientPid = 0;
-
+void CameraService::logConnectionAttempt(int clientPid, const std::string& clientPackageName,
+        const std::string& cameraId, apiLevel effectiveApiLevel) const {
     int packagePid = (clientPid == USE_CALLING_PID) ?
         getCallingPid() : clientPid;
     ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
             "Camera API version %d", packagePid, clientPackageName.c_str(), cameraId.c_str(),
             static_cast<int>(effectiveApiLevel));
+}
+
+std::string CameraService::resolvePackageName(int clientUid,
+        const std::string& clientPackageNameMaybe) const {
+    if (clientPackageNameMaybe.size() <= 0) {
+        int packageUid = (clientUid == USE_CALLING_UID) ?
+                getCallingUid() : clientUid;
+        // NDK calls don't come with package names, but we need one for various cases.
+        // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
+        // do exist. For all authentication cases, all packages under the same UID get the
+        // same permissions, so picking any associated package name is sufficient. For some
+        // other cases, this may give inaccurate names for clients in logs.
+        return getPackageNameFromUid(packageUid);
+    } else {
+        return clientPackageNameMaybe;
+    }
+}
+
+template<class CALLBACK, class CLIENT>
+Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
+        int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
+        const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
+        apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
+        int rotationOverride, bool forceSlowJpegMode,
+        const std::string& originalCameraId, bool isNonSystemNdk, /*out*/sp<CLIENT>& device) {
+    binder::Status ret = binder::Status::ok();
+
+    int packageUid = (clientUid == USE_CALLING_UID) ?
+            getCallingUid() : clientUid;
+    int packagePid = (clientPid == USE_CALLING_PID) ?
+            getCallingPid() : clientPid;
 
     nsecs_t openTimeNs = systemTime();
 
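A hypothetical call site for the resolvePackageName() helper defined above, sketching the NDK case in which no package name is supplied and the service falls back to a package registered for the caller's UID:

    // An empty incoming package name marks the caller as a non-system NDK client.
    std::string clientPackageNameMaybe = "";  // nothing supplied by the NDK caller
    bool isNonSystemNdk = clientPackageNameMaybe.empty();
    std::string clientPackageName = resolvePackageName(
            hardware::ICameraService::USE_CALLING_UID, clientPackageNameMaybe);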
@@ -2646,7 +2445,7 @@
 
         // Enforce client permissions and do basic validity checks
         if (!(ret = validateConnectLocked(cameraId, clientPackageName,
-                /*inout*/clientUid, /*inout*/clientPid, /*out*/originalClientPid)).isOk()) {
+                /*inout*/clientUid, /*inout*/clientPid)).isOk()) {
             return ret;
         }
 
@@ -2663,7 +2462,7 @@
 
         sp<BasicClient> clientTmp = nullptr;
         std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>> partial;
-        if ((err = handleEvictionsLocked(cameraId, originalClientPid, effectiveApiLevel,
+        if ((err = handleEvictionsLocked(cameraId, clientPid, effectiveApiLevel,
                 IInterface::asBinder(cameraCb), clientPackageName, oomScoreOffset,
                 systemNativeClient, /*out*/&clientTmp, /*out*/&partial)) != NO_ERROR) {
             switch (err) {
@@ -2697,7 +2496,7 @@
 
         int portraitRotation;
         auto deviceVersionAndTransport =
-                getDeviceVersion(cameraId, overrideToPortrait, /*out*/&portraitRotation,
+                getDeviceVersion(cameraId, rotationOverride, /*out*/&portraitRotation,
                         /*out*/&facing, /*out*/&orientation);
         if (facing == -1) {
             ALOGE("%s: Unable to get camera device \"%s\"  facing", __FUNCTION__, cameraId.c_str());
@@ -2708,11 +2507,14 @@
         sp<BasicClient> tmp = nullptr;
         bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                 mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
+
+        // Only use the passed-in clientPid for permission checks. Use the calling PID as the
+        // client PID that is directly connected to the camera service.
         if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, api1CameraId, facing,
-                orientation, clientPid, clientUid, getpid(),
+                orientation, getCallingPid(), clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
-                overrideToPortrait, forceSlowJpegMode, originalCameraId,
+                rotationOverride, forceSlowJpegMode, originalCameraId,
                 /*out*/&tmp)).isOk()) {
             return ret;
         }
@@ -2776,7 +2578,7 @@
         CameraMetadata chars;
         bool rotateAndCropSupported = true;
         err = mCameraProviderManager->getCameraCharacteristics(cameraId, overrideForPerfClass,
-                &chars, overrideToPortrait);
+                &chars, rotationOverride);
         if (err == OK) {
             auto availableRotateCropEntry = chars.find(
                     ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES);
@@ -2792,7 +2594,8 @@
             // Set rotate-and-crop override behavior
             if (mOverrideRotateAndCropMode != ANDROID_SCALER_ROTATE_AND_CROP_AUTO) {
                 client->setRotateAndCropOverride(mOverrideRotateAndCropMode);
-            } else if (overrideToPortrait && portraitRotation != 0) {
+            } else if (rotationOverride != hardware::ICameraService::ROTATION_OVERRIDE_NONE &&
+                    portraitRotation != 0) {
                 uint8_t rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_AUTO;
                 switch (portraitRotation) {
                     case 90:
@@ -2808,6 +2611,8 @@
                         ALOGE("Unexpected portrait rotation: %d", portraitRotation);
                         break;
                 }
+                // Communicate the chosen rotate-and-crop mode to the client so it can send it
+                // to the HAL.
                 client->setRotateAndCropOverride(rotateAndCropMode);
             } else {
                 client->setRotateAndCropOverride(
@@ -3003,8 +2808,8 @@
 }
 
 Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
-        int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
-        int32_t devicePolicy) {
+        int32_t torchStrength, const sp<IBinder>& clientBinder,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -3015,11 +2820,11 @@
     }
 
     int uid = getCallingUid();
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, uid);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -3141,7 +2946,8 @@
 }
 
 Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
-        const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy) {
+        const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -3152,11 +2958,11 @@
     }
 
     int uid = getCallingUid();
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy, uid);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
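A hypothetical caller of the updated setTorchMode() shown above; device identity now travels inside the AttributionSourceState rather than as a separate deviceId argument (cameraService, cameraId, and clientBinder are assumed handles):

    AttributionSourceState attribution;   // built as in the earlier connect() sketch
    attribution.deviceId = kDefaultDeviceId;
    binder::Status status = cameraService->setTorchMode(cameraId, /*enabled*/ true,
            clientBinder, attribution, /*devicePolicy*/ 0);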
@@ -3431,7 +3237,6 @@
     return Status::ok();
 }
 
-// TODO(b/291736219): This to be made device-aware.
 Status CameraService::getConcurrentCameraIds(
         std::vector<ConcurrentCameraIdCombination>* concurrentCameraIds) {
     ATRACE_CALL();
@@ -3451,7 +3256,8 @@
     std::vector<std::unordered_set<std::string>> concurrentCameraCombinations =
             mCameraProviderManager->getConcurrentCameraIds();
     for (auto &combination : concurrentCameraCombinations) {
-        std::vector<std::string> validCombination;
+        std::vector<std::pair<std::string, int32_t>> validCombination;
+        int32_t firstDeviceId = kInvalidDeviceId;
         for (auto &cameraId : combination) {
             // if the camera state is not present, skip
             auto state = getCameraState(cameraId);
@@ -3466,7 +3272,17 @@
             if (shouldRejectSystemCameraConnection(cameraId)) {
                 continue;
             }
-            validCombination.push_back(cameraId);
+            auto [cameraOwnerDeviceId, mappedCameraId] =
+                    mVirtualDeviceCameraIdMapper.getDeviceIdAndMappedCameraIdPair(cameraId);
+            if (firstDeviceId == kInvalidDeviceId) {
+                firstDeviceId = cameraOwnerDeviceId;
+            } else if (firstDeviceId != cameraOwnerDeviceId) {
+                // This combination mixes cameras owned by different device ids, so it is
+                // invalid; discard it.
+                validCombination.clear();
+                break;
+            }
+            validCombination.push_back({mappedCameraId, cameraOwnerDeviceId});
         }
         if (validCombination.size() != 0) {
             concurrentCameraIds->push_back(std::move(validCombination));
@@ -3477,7 +3293,8 @@
 
 Status CameraService::isConcurrentSessionConfigurationSupported(
         const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
-        int targetSdkVersion, /*out*/bool* isSupported) {
+        int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        /*out*/bool* isSupported) {
     if (!isSupported) {
         ALOGE("%s: isSupported is NULL", __FUNCTION__);
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, "isSupported is NULL");
@@ -3489,12 +3306,26 @@
                 "Camera subsystem is not available");
     }
 
+    for (auto cameraIdAndSessionConfiguration : cameraIdsAndSessionConfigurations) {
+        std::optional<std::string> cameraIdOptional =
+                resolveCameraId(cameraIdAndSessionConfiguration.mCameraId,
+                        clientAttribution.deviceId, devicePolicy);
+        if (!cameraIdOptional.has_value()) {
+            std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
+                    cameraIdAndSessionConfiguration.mCameraId.c_str(), clientAttribution.deviceId);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+        cameraIdAndSessionConfiguration.mCameraId = cameraIdOptional.value();
+    }
+
     // Check for camera permissions
     int callingPid = getCallingPid();
     int callingUid = getCallingUid();
-    // TODO(b/291736219): Pass deviceId owning the camera if we make this method device-aware.
     bool hasCameraPermission = ((callingPid == getpid()) ||
-            hasPermissionsForCamera(callingPid, callingUid, kDefaultDeviceId));
+            hasPermissionsForCamera(callingPid, callingUid,
+                    devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT
+                        ? kDefaultDeviceId : clientAttribution.deviceId));
     if (!hasCameraPermission) {
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
                 "android.permission.CAMERA needed to call"
@@ -3594,8 +3425,7 @@
                 [this, &isVendorListener, &clientPid, &clientUid](const hardware::CameraStatus& s) {
                         std::string cameraId = s.cameraId;
                         std::optional<std::string> cameraIdOptional = resolveCameraId(s.cameraId,
-                                s.deviceId, IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM,
-                                clientUid);
+                                s.deviceId, IVirtualDeviceManagerNative::DEVICE_POLICY_CUSTOM);
                         if (!cameraIdOptional.has_value()) {
                             std::string msg =
                                     fmt::sprintf(
@@ -3701,13 +3531,10 @@
     return ret;
 }
 
-Status CameraService::supportsCameraApi(const std::string& unresolvedCameraId, int apiVersion,
+Status CameraService::supportsCameraApi(const std::string& cameraId, int apiVersion,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
-    const std::string cameraId = resolveCameraId(
-            unresolvedCameraId, getCallingUid());
-
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
 
     switch (apiVersion) {
@@ -3721,7 +3548,10 @@
     }
 
     int portraitRotation;
-    auto deviceVersionAndTransport = getDeviceVersion(cameraId, false, &portraitRotation);
+    auto deviceVersionAndTransport =
+            getDeviceVersion(cameraId,
+                    /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                    &portraitRotation);
     if (deviceVersionAndTransport.first == -1) {
         std::string msg = fmt::sprintf("Unknown camera ID %s", cameraId.c_str());
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
@@ -3766,13 +3596,10 @@
     return Status::ok();
 }
 
-Status CameraService::isHiddenPhysicalCamera(const std::string& unresolvedCameraId,
+Status CameraService::isHiddenPhysicalCamera(const std::string& cameraId,
         /*out*/ bool *isSupported) {
     ATRACE_CALL();
 
-    const std::string cameraId = resolveCameraId(unresolvedCameraId,
-            getCallingUid());
-
     ALOGV("%s: for camera ID = %s", __FUNCTION__, cameraId.c_str());
     *isSupported = mCameraProviderManager->isHiddenPhysicalCamera(cameraId);
 
@@ -4226,14 +4053,14 @@
         const std::string& cameraIdStr,
         int api1CameraId, int cameraFacing, int sensorOrientation,
         int clientPid, uid_t clientUid,
-        int servicePid, bool overrideToPortrait) :
+        int servicePid, int rotationOverride) :
         CameraService::BasicClient(cameraService,
                 IInterface::asBinder(cameraClient),
                 attributionAndPermissionUtils,
                 clientPackageName, systemNativeClient, clientFeatureId,
                 cameraIdStr, cameraFacing, sensorOrientation,
                 clientPid, clientUid,
-                servicePid, overrideToPortrait),
+                servicePid, rotationOverride),
         mCameraId(api1CameraId)
 {
     int callingPid = getCallingPid();
@@ -4264,7 +4091,7 @@
         const std::string& clientPackageName, bool nativeClient,
         const std::optional<std::string>& clientFeatureId, const std::string& cameraIdStr,
         int cameraFacing, int sensorOrientation, int clientPid, uid_t clientUid,
-        int servicePid, bool overrideToPortrait):
+        int servicePid, int rotationOverride):
         AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mDestructionStarted(false),
         mCameraIdStr(cameraIdStr), mCameraFacing(cameraFacing), mOrientation(sensorOrientation),
@@ -4273,7 +4100,7 @@
         mClientPid(clientPid), mClientUid(clientUid),
         mServicePid(servicePid),
         mDisconnected(false), mUidIsTrusted(false),
-        mOverrideToPortrait(overrideToPortrait),
+        mRotationOverride(rotationOverride),
         mAudioRestriction(hardware::camera2::ICameraDeviceUser::AUDIO_RESTRICTION_NONE),
         mRemoteBinder(remoteCallback),
         mOpsActive(false),
@@ -4444,8 +4271,10 @@
         // connection has been fully established and at that time camera muting
         // capabilities are unknown.
         if (!isUidActive || !isCameraPrivacyEnabled) {
-            ALOGI("Camera %s: Access for \"%s\" has been restricted",
-                    mCameraIdStr.c_str(), mClientPackageName.c_str());
+            ALOGI("Camera %s: Access for \"%s\" has been restricted. "
+                    "uid active: %s, privacy enabled: %s", mCameraIdStr.c_str(),
+                    mClientPackageName.c_str(), isUidActive ? "true" : "false",
+                    isCameraPrivacyEnabled ? "true" : "false");
             // Return the same error as for device policy manager rejection
             return -EACCES;
         }
@@ -4464,14 +4293,9 @@
         // Notify app ops that the camera is not available
         mOpsCallback = new OpsCallback(this);
 
-        if (flags::watch_foreground_changes()) {
-            mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
-                toString16(mClientPackageName),
-                AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
-        } else {
-            mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
-                toString16(mClientPackageName), mOpsCallback);
-        }
+        mAppOpsManager->startWatchingMode(AppOpsManager::OP_CAMERA,
+            toString16(mClientPackageName),
+            AppOpsManager::WATCH_FOREGROUND_CHANGES, mOpsCallback);
 
         // Just check for camera access here on open - delay startOp until
         // camera frames start streaming in startCameraStreamingOps
@@ -4654,19 +4478,10 @@
         // (WAR for b/175320666)the AppOpsManager could return MODE_IGNORED. Do not treat such
         // cases as error.
         if (!mUidIsTrusted) {
-            if (flags::watch_foreground_changes()) {
-                if (isUidVisible && isCameraPrivacyEnabled && supportsCameraMute()) {
-                    setCameraMute(true);
-                } else {
-                    block();
-                }
+            if (isUidVisible && isCameraPrivacyEnabled && supportsCameraMute()) {
+                setCameraMute(true);
             } else {
-                if (isUidActive && isCameraPrivacyEnabled && supportsCameraMute()) {
-                    setCameraMute(true);
-                } else if (!isUidActive
-                    || (isCameraPrivacyEnabled && !supportsCameraMute())) {
-                    block();
-                }
+                block();
             }
         }
     } else if (res == AppOpsManager::MODE_ALLOWED) {
@@ -5112,7 +4927,7 @@
 
 bool CameraService::SensorPrivacyPolicy::isCameraPrivacyEnabled(const String16& packageName) {
     if (!hasCameraPrivacyFeature()) {
-        return SensorPrivacyManager::DISABLED;
+        return false;
     }
     return mSpm.isCameraPrivacyEnabled(packageName);
 }
@@ -5327,7 +5142,7 @@
     }
     if (hasAny) ret << "\n";
     ret << "]\n";
-    return std::move(ret.str());
+    return ret.str();
 }
 
 CameraService::DescriptorPtr CameraService::CameraClientManager::makeClientDescriptor(
@@ -5802,7 +5617,7 @@
     if (vd_flags::camera_device_awareness() && status == StatusInternal::PRESENT) {
         CameraMetadata cameraInfo;
         status_t res = mCameraProviderManager->getCameraCharacteristics(
-                cameraId, false, &cameraInfo, false);
+                cameraId, false, &cameraInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res != OK) {
             ALOGW("%s: Not able to get camera characteristics for camera id %s",
                   __FUNCTION__, cameraId.c_str());
@@ -5876,26 +5691,6 @@
                             "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
                             __FUNCTION__, listener->getListenerUid(), listener->getListenerPid(),
                             ret.exceptionCode());
-
-                    // Only cameras of the default device can be remapped to a different camera
-                    // (using remapCameraIds method), so do the following only if the camera is
-                    // associated with the default device.
-                    if (deviceId == kDefaultDeviceId) {
-                        // For the default device, also trigger the callbacks for cameras that were
-                        // remapped to the current cameraId for the specific package that this
-                        // listener belongs to.
-                        std::vector<std::string> remappedCameraIds =
-                                findOriginalIdsForRemappedCameraId(cameraId,
-                                        listener->getListenerUid());
-                        for (auto &remappedCameraId: remappedCameraIds) {
-                            ret = listener->getListener()->onStatusChanged(
-                                    mapToInterface(status), remappedCameraId, kDefaultDeviceId);
-                            listener->handleBinderStatus(ret,
-                                    "%s: Failed to trigger onStatusChanged callback for %d:%d: %d",
-                                    __FUNCTION__, listener->getListenerUid(),
-                                    listener->getListenerPid(), ret.exceptionCode());
-                        }
-                    }
                 }
             });
 }
@@ -5953,7 +5748,7 @@
     }
 
     ALOGV("%s: Status has changed for camera ID %s from %#x to %#x", __FUNCTION__,
-            cameraId.c_str(), oldStatus, status);
+            cameraId.c_str(), eToI(oldStatus), eToI(status));
 
     if (oldStatus == StatusInternal::NOT_PRESENT &&
             (status != StatusInternal::PRESENT &&
@@ -6126,8 +5921,6 @@
         return handleWatchCommand(args, in, out);
     } else if (args.size() >= 2 && args[0] == toString16("set-watchdog")) {
         return handleSetCameraServiceWatchdog(args);
-    } else if (args.size() >= 4 && args[0] == toString16("remap-camera-id")) {
-        return handleCameraIdRemapping(args, err);
     } else if (args.size() == 1 && args[0] == toString16("help")) {
         printHelp(out);
         return OK;
@@ -6136,23 +5929,6 @@
     return BAD_VALUE;
 }
 
-status_t CameraService::handleCameraIdRemapping(const Vector<String16>& args, int err) {
-    uid_t uid = IPCThreadState::self()->getCallingUid();
-    if (uid != AID_ROOT) {
-        dprintf(err, "Must be adb root\n");
-        return PERMISSION_DENIED;
-    }
-    if (args.size() != 4) {
-        dprintf(err, "Expected format: remap-camera-id <PACKAGE> <Id0> <Id1>\n");
-        return BAD_VALUE;
-    }
-    std::string packageName = toStdString(args[1]);
-    std::string cameraIdToReplace = toStdString(args[2]);
-    std::string cameraIdNew = toStdString(args[3]);
-    remapCameraIds({{packageName, {{cameraIdToReplace, cameraIdNew}}}});
-    return OK;
-}
-
 status_t CameraService::handleSetUidState(const Vector<String16>& args, int err) {
     std::string packageName = toStdString(args[1]);
 
@@ -6769,7 +6545,6 @@
         "  set-watchdog <VALUE> enables or disables the camera service watchdog\n"
         "      Valid values 0=disable, 1=enable\n"
         "  watch <start|stop|dump|print|clear> manages tag monitoring in connected clients\n"
-        "  remap-camera-id <PACKAGE> <Id0> <Id1> remaps camera ids. Must use adb root\n"
         "  help print this message\n");
 }
 
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 8dbd591..80bd783 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -21,7 +21,6 @@
 #include <android/hardware/BnCameraService.h>
 #include <android/hardware/BnSensorPrivacyListener.h>
 #include <android/hardware/ICameraServiceListener.h>
-#include <android/hardware/CameraIdRemapping.h>
 #include <android/hardware/camera2/BnCameraInjectionSession.h>
 #include <android/hardware/camera2/ICameraInjectionCallback.h>
 
@@ -34,7 +33,6 @@
 #include <binder/IServiceManager.h>
 #include <binder/IActivityManager.h>
 #include <binder/IAppOpsCallback.h>
-#include <binder/IPermissionController.h>
 #include <binder/IUidObserver.h>
 #include <hardware/camera.h>
 #include <sensorprivacy/SensorPrivacyManager.h>
@@ -153,15 +151,18 @@
     // ICameraService
     // IMPORTANT: All binder calls that deal with logicalCameraId should use
     // resolveCameraId(logicalCameraId, deviceId, devicePolicy) to arrive at the correct
-    // cameraId to perform the operation on (in case of Id Remapping, or in case of contexts
+    // cameraId to perform the operation on (in case of contexts
     // associated with virtual devices).
-    virtual binder::Status     getNumberOfCameras(int32_t type, int32_t deviceId,
+    virtual binder::Status     getNumberOfCameras(int32_t type,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, int32_t* numCameras);
 
-    virtual binder::Status     getCameraInfo(int cameraId, bool overrideToPortrait,
-            int32_t deviceId, int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
+    virtual binder::Status     getCameraInfo(int cameraId, int rotationOverride,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
     virtual binder::Status     getCameraCharacteristics(const std::string& cameraId,
-            int targetSdkVersion, bool overrideToPortrait, int32_t deviceId,
+            int targetSdkVersion, int rotationOverride,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
@@ -171,17 +172,15 @@
             hardware::camera2::params::VendorTagDescriptorCache* cache);
 
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
-            int32_t cameraId, const std::string& clientPackageName,
-            int32_t clientUid, int clientPid, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode, int32_t deviceId,
+            int32_t cameraId, int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, /*out*/ sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-            const std::string& cameraId,
-            const std::string& clientPackageName, const std::optional<std::string>& clientFeatureId,
-            int32_t clientUid, int scoreOffset, int targetSdkVersion, bool overrideToPortrait,
-            int32_t deviceId, int32_t devicePolicy,
+            const std::string& cameraId, int scoreOffset, int targetSdkVersion,
+            int rotationOverride, const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -191,14 +190,14 @@
     virtual binder::Status    removeListener(
             const sp<hardware::ICameraServiceListener>& listener);
 
-    // TODO(b/291736219): This to be made device-aware.
     virtual binder::Status getConcurrentCameraIds(
         /*out*/
         std::vector<hardware::camera2::utils::ConcurrentCameraIdCombination>* concurrentCameraIds);
 
     virtual binder::Status isConcurrentSessionConfigurationSupported(
         const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>& sessions,
-        int targetSdkVersion, /*out*/bool* supported);
+        int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        /*out*/bool* supported);
 
     virtual binder::Status    getLegacyParameters(
             int32_t cameraId,
@@ -206,13 +205,16 @@
             std::string* parameters);
 
     virtual binder::Status    setTorchMode(const std::string& cameraId, bool enabled,
-            const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy);
-
-    virtual binder::Status    turnOnTorchWithStrengthLevel(const std::string& cameraId,
-            int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
+            const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
             int32_t devicePolicy);
 
-    virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId, int32_t deviceId,
+    virtual binder::Status    turnOnTorchWithStrengthLevel(const std::string& cameraId,
+            int32_t torchStrength, const sp<IBinder>& clientBinder,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy);
+
+    virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, int32_t* torchStrength);
 
     virtual binder::Status    notifySystemEvent(int32_t eventId,
@@ -243,27 +245,25 @@
     virtual binder::Status reportExtensionSessionStats(
             const hardware::CameraExtensionSessionStats& stats, std::string* sessionKey /*out*/);
 
-    virtual binder::Status remapCameraIds(const hardware::CameraIdRemapping&
-            cameraIdRemapping);
-
     virtual binder::Status injectSessionParams(
             const std::string& cameraId,
             const hardware::camera2::impl::CameraMetadataNative& sessionParams);
 
     virtual binder::Status createDefaultRequest(const std::string& cameraId, int templateId,
-            int32_t deviceId, int32_t devicePolicy,
+            const AttributionSourceState& clientAttribution, int32_t devicePolicy,
             /*out*/
             hardware::camera2::impl::CameraMetadataNative* request);
 
     virtual binder::Status isSessionConfigurationWithParametersSupported(
             const std::string& cameraId, int targetSdkVersion,
             const SessionConfiguration& sessionConfiguration,
-            int32_t deviceId, int32_t devicePolicy,
+            const AttributionSourceState& clientAttribution, int32_t devicePolicy,
             /*out*/ bool* supported);
 
     virtual binder::Status getSessionCharacteristics(
-            const std::string& cameraId, int targetSdkVersion, bool overrideToPortrait,
-            const SessionConfiguration& sessionConfiguration, int32_t deviceId,
+            const std::string& cameraId, int targetSdkVersion, int rotationOverride,
+            const SessionConfiguration& sessionConfiguration,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, /*out*/ CameraMetadata* outMetadata);
 
     // Extra permissions checks
@@ -307,7 +307,8 @@
     /////////////////////////////////////////////////////////////////////
     // CameraDeviceFactory functionality
     std::pair<int, IPCTransport>    getDeviceVersion(const std::string& cameraId,
-            bool overrideToPortrait, int* portraitRotation,
+            int rotationOverride,
+            int* portraitRotation,
             int* facing = nullptr, int* orientation = nullptr);
 
     /////////////////////////////////////////////////////////////////////
@@ -357,7 +358,7 @@
         }
 
         bool getOverrideToPortrait() const {
-            return mOverrideToPortrait;
+            return mRotationOverride == ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
         }
 
         // Disallows dumping over binder interface
@@ -461,7 +462,7 @@
                 int clientPid,
                 uid_t clientUid,
                 int servicePid,
-                bool overrideToPortrait);
+                int rotationOverride);
 
         virtual ~BasicClient();
 
@@ -484,7 +485,7 @@
         const pid_t                     mServicePid;
         bool                            mDisconnected;
         bool                            mUidIsTrusted;
-        bool                            mOverrideToPortrait;
+        int                             mRotationOverride;
 
         mutable Mutex                   mAudioRestrictionLock;
         int32_t                         mAudioRestriction;
@@ -576,7 +577,7 @@
                 int clientPid,
                 uid_t clientUid,
                 int servicePid,
-                bool overrideToPortrait);
+                int rotationOverride);
         ~Client();
 
         // return our camera client
@@ -684,25 +685,6 @@
         return activityManager;
     }
 
-    static const sp<IPermissionController>& getPermissionController() {
-        static const char* kPermissionControllerService = "permission";
-        static thread_local sp<IPermissionController> sPermissionController = nullptr;
-
-        if (sPermissionController == nullptr ||
-                !IInterface::asBinder(sPermissionController)->isBinderAlive()) {
-            sp<IServiceManager> sm = defaultServiceManager();
-            sp<IBinder> binder = sm->checkService(toString16(kPermissionControllerService));
-            if (binder == nullptr) {
-                ALOGE("%s: Could not get permission service", __FUNCTION__);
-                sPermissionController = nullptr;
-            } else {
-                sPermissionController = interface_cast<IPermissionController>(binder);
-            }
-        }
-
-        return sPermissionController;
-    }
-
     /**
      * Typesafe version of device status, containing both the HAL-layer and the service interface-
      * layer values.
@@ -945,17 +927,19 @@
     void removeStates(const std::string& id);
 
     // Check if we can connect, before we acquire the service lock.
-    // The returned originalClientPid is the PID of the original process that wants to connect to
-    // camera.
-    // The returned clientPid is the PID of the client that directly connects to camera.
-    // originalClientPid and clientPid are usually the same except when the application uses
-    // mediaserver to connect to camera (using MediaRecorder to connect to camera). In that case,
-    // clientPid is the PID of mediaserver and originalClientPid is the PID of the application.
+    // If clientPid/clientUid are USE_CALLING_PID/USE_CALLING_UID, they will be overwritten with
+    // the calling pid/uid.
     binder::Status validateConnectLocked(const std::string& cameraId, const std::string& clientName,
-            /*inout*/int& clientUid, /*inout*/int& clientPid, /*out*/int& originalClientPid) const;
+            int clientUid, int clientPid) const;
     binder::Status validateClientPermissionsLocked(const std::string& cameraId,
-            const std::string& clientName, /*inout*/int& clientUid, /*inout*/int& clientPid,
-            /*out*/int& originalClientPid) const;
+            const std::string& clientName, int clientUid, int clientPid) const;
+
+    // If clientPackageNameMaybe is empty, resolves the package name from the client UID.
+    std::string resolvePackageName(int clientUid, const std::string& clientPackageNameMaybe) const;
+    void logConnectionAttempt(int clientPid, const std::string& clientPackageName,
+        const std::string& cameraId, apiLevel effectiveApiLevel) const;
+    binder::Status errorNotTrusted(int clientPid, int clientUid, const std::string& cameraId,
+            const std::string& clientName, bool isPid) const;
 
     bool isCameraPrivacyEnabled(const String16& packageName,const std::string& cameraId,
            int clientPid, int ClientUid);
@@ -993,22 +977,14 @@
     // sorted in alpha-numeric order.
     void filterAPI1SystemCameraLocked(const std::vector<std::string> &normalDeviceIds);
 
-    // In some cases the calling code has no access to the package it runs under.
-    // For example, NDK camera API.
-    // In this case we will get the packages for the calling UID and pick the first one
-    // for attributing the app op. This will work correctly for runtime permissions
-    // as for legacy apps we will toggle the app op for all packages in the UID.
-    // The caveat is that the operation may be attributed to the wrong package and
-    // stats based on app ops may be slightly off.
-    std::string getPackageNameFromUid(int clientUid);
-
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
-            int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
+            int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
             const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
-            bool overrideToPortrait, bool forceSlowJpegMode, const std::string& originalCameraId,
+            int rotationOverride, bool forceSlowJpegMode,
+            const std::string& originalCameraId, bool isNonSystemNdk,
             /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
@@ -1041,43 +1017,9 @@
     mutable Mutex mCameraStatesLock;
 
     /**
-     * Mapping from packageName -> {cameraIdToReplace -> newCameraIdtoUse}.
-     *
-     * This specifies that for packageName, for every binder operation targeting
-     * cameraIdToReplace, use newCameraIdToUse instead.
-     */
-    typedef std::map<std::string, std::map<std::string, std::string>> TCameraIdRemapping;
-    TCameraIdRemapping mCameraIdRemapping{};
-    /** Mutex guarding mCameraIdRemapping. */
-    Mutex mCameraIdRemappingLock;
-
-    /** Parses cameraIdRemapping parcelable into the native cameraIdRemappingMap. */
-    binder::Status parseCameraIdRemapping(
-            const hardware::CameraIdRemapping& cameraIdRemapping,
-            /* out */ TCameraIdRemapping* cameraIdRemappingMap);
-
-    /**
-     * Resolve the (potentially remapped) camera id to use for packageName for the default device
-     * context.
-     *
-     * This returns the Camera id to use in case inputCameraId was remapped to a
-     * different id for the given packageName. Otherwise, it returns the inputCameraId.
-     *
-     * If the packageName is not provided, it will be inferred from the clientUid.
-     */
-    std::string resolveCameraId(
-            const std::string& inputCameraId,
-            int clientUid,
-            const std::string& packageName = "");
-
-    /**
      * Resolve the (potentially remapped) camera id for the given input camera id and the given
      * device id and device policy (for the device associated with the context of the caller).
      *
-     * For any context associated with the default device or a virtual device with default camera
-     * policy, this will return the actual camera id (in case inputCameraId was remapped using
-     * the remapCameraIds method).
-     *
      * For any context associated with a virtual device with custom camera policy, this will return
      * the actual camera id if inputCameraId corresponds to the mapped id of a virtual camera
      * (for virtual devices with custom camera policy, the back and front virtual cameras of that
@@ -1086,20 +1028,7 @@
     std::optional<std::string> resolveCameraId(
             const std::string& inputCameraId,
             int32_t deviceId,
-            int32_t devicePolicy,
-            int clientUid,
-            const std::string& packageName = "");
-
-    /**
-     * Updates the state of mCameraIdRemapping, while disconnecting active clients as necessary.
-     */
-    void remapCameraIds(const TCameraIdRemapping& cameraIdRemapping);
-
-    /**
-     * Finds the Camera Ids that were remapped to the inputCameraId for the given client.
-     */
-    std::vector<std::string> findOriginalIdsForRemappedCameraId(
-        const std::string& inputCameraId, int clientUid);
+            int32_t devicePolicy);
 
     // Circular buffer for storing event logging for dumps
     RingBuffer<std::string> mEventLog;
@@ -1549,7 +1478,7 @@
             const std::string& cameraId, int api1CameraId, int facing, int sensorOrientation,
             int clientPid, uid_t clientUid, int servicePid,
             std::pair<int, IPCTransport> deviceVersionAndIPCTransport, apiLevel effectiveApiLevel,
-            bool overrideForPerfClass, bool overrideToPortrait, bool forceSlowJpegMode,
+            bool overrideForPerfClass, int rotationOverride, bool forceSlowJpegMode,
             const std::string& originalCameraId,
             /*out*/ sp<BasicClient>* client);
 
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.cpp b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
index 1c1bd24..ad1a84f 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.cpp
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.cpp
@@ -17,11 +17,14 @@
 #define LOG_TAG "CameraServiceWatchdog"
 
 #include "CameraServiceWatchdog.h"
+#include "com_android_internal_camera_flags.h"
 #include "android/set_abort_message.h"
 #include "utils/CameraServiceProxyWrapper.h"
 
 namespace android {
 
+namespace flags = com::android::internal::camera::flags;
+
 bool CameraServiceWatchdog::threadLoop()
 {
     {
@@ -51,6 +54,12 @@
                         true /*deviceError*/);
                 // We use abort here so we can get a tombstone for better
                 // debugging.
+                if (flags::enable_hal_abort_from_cameraservicewatchdog()) {
+                    for (pid_t pid : mProviderPids) {
+                        kill(pid, SIGABRT);
+                    }
+                }
+
                 abort();
             }
         }
diff --git a/services/camera/libcameraservice/CameraServiceWatchdog.h b/services/camera/libcameraservice/CameraServiceWatchdog.h
index afc432d..691a274 100644
--- a/services/camera/libcameraservice/CameraServiceWatchdog.h
+++ b/services/camera/libcameraservice/CameraServiceWatchdog.h
@@ -26,10 +26,11 @@
  *   and single call monitoring differently. See function documentation for
  *   more details.
  * To disable/enable:
- *   - adb shell cmd media.camera set-cameraservice-watchdog [0/1]
+ *   - adb shell cmd media.camera set-watchdog [0/1]
  */
 #pragma once
 #include <chrono>
+#include <set>
 #include <thread>
 #include <time.h>
 #include <utils/Thread.h>
@@ -57,16 +58,17 @@
 };
 
 public:
-    explicit CameraServiceWatchdog(const std::string &cameraId,
+
+    explicit CameraServiceWatchdog(const std::set<pid_t> &pids, const std::string &cameraId,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
-                    mCameraId(cameraId), mPause(true), mMaxCycles(kMaxCycles),
+                    mProviderPids(pids), mCameraId(cameraId), mPause(true), mMaxCycles(kMaxCycles),
                     mCycleLengthMs(kCycleLengthMs), mEnabled(true),
                     mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
 
-    explicit CameraServiceWatchdog (const std::string &cameraId, size_t maxCycles,
-            uint32_t cycleLengthMs, bool enabled,
+    explicit CameraServiceWatchdog (const std::set<pid_t> &pids, const std::string &cameraId,
+            size_t maxCycles, uint32_t cycleLengthMs, bool enabled,
             std::shared_ptr<CameraServiceProxyWrapper> cameraServiceProxyWrapper) :
-                    mCameraId(cameraId), mPause(true), mMaxCycles(maxCycles),
+                    mProviderPids(pids), mCameraId(cameraId), mPause(true), mMaxCycles(maxCycles),
                     mCycleLengthMs(cycleLengthMs), mEnabled(enabled),
                     mCameraServiceProxyWrapper(cameraServiceProxyWrapper) {};
 
@@ -90,7 +92,8 @@
             // Lock for mEnabled
             mEnabledLock.lock();
             sp<CameraServiceWatchdog> tempWatchdog = new CameraServiceWatchdog(
-                    mCameraId, cycles, cycleLength, mEnabled, mCameraServiceProxyWrapper);
+                    mProviderPids, mCameraId, cycles, cycleLength, mEnabled,
+                    mCameraServiceProxyWrapper);
             mEnabledLock.unlock();
 
             status_t status = tempWatchdog->run("CameraServiceWatchdog");
@@ -150,6 +153,7 @@
     Mutex           mWatchdogLock;      // Lock for condition variable
     Mutex           mEnabledLock;       // Lock for enabled status
     Condition       mWatchdogCondition; // Condition variable for stop/start
+    std::set<pid_t> mProviderPids;      // Process ID set of camera providers
     std::string     mCameraId;          // Camera Id the watchdog belongs to
     bool            mPause;             // True if tid map is empty
     uint32_t        mMaxCycles;         // Max cycles
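A minimal sketch (assumed call site) of constructing the watchdog with its new first argument, the set of camera provider pids that can be sent SIGABRT when enable_hal_abort_from_cameraservicewatchdog is enabled:

    std::set<pid_t> providerPids = {/* pids of the camera HAL provider processes */};
    sp<CameraServiceWatchdog> watchdog = new CameraServiceWatchdog(
            providerPids, cameraId, cameraServiceProxyWrapper);
    status_t status = watchdog->run("CameraServiceWatchdog");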
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index cb11023..7f674bd 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -28,6 +28,7 @@
 #include <binder/Status.h>
 #include <camera/CameraUtils.h>
 #include <hidl/HidlTransportSupport.h>
+#include <utils/AttributionAndPermissionUtils.h>
 #include <utils/Utils.h>
 
 namespace android::frameworks::cameraservice::service::implementation {
@@ -38,6 +39,7 @@
 using ::android::hardware::cameraservice::utils::conversion::aidl::cloneToAidl;
 using ::android::hardware::cameraservice::utils::conversion::aidl::convertToAidl;
 using ::android::hardware::cameraservice::utils::conversion::aidl::filterVndkKeys;
+using hardware::BnCameraService::ROTATION_OVERRIDE_NONE;
 using ::ndk::ScopedAStatus;
 
 // VNDK classes
@@ -88,10 +90,15 @@
     if (_aidl_return == nullptr) { return fromSStatus(SStatus::ILLEGAL_ARGUMENT); }
 
     ::android::CameraMetadata cameraMetadata;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
     UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
                                                            mVndkVersion,
-                                                           /* overrideToPortrait= */ false,
-                                                           kDefaultDeviceId,
+                                                           ROTATION_OVERRIDE_NONE,
+                                                           clientAttribution,
                                                            /* devicePolicy= */ 0,
                                                            &cameraMetadata);
     if (!ret.isOk()) {
@@ -142,16 +149,20 @@
         return fromSStatus(SStatus::UNKNOWN_ERROR);
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
     binder::Status serviceRet = mCameraService->connectDevice(
             callbacks,
             in_cameraId,
-            std::string(),
-            /* clientFeatureId= */{},
-            hardware::ICameraService::USE_CALLING_UID,
             /* scoreOffset= */ 0,
             /* targetSdkVersion= */ __ANDROID_API_FUTURE__,
-            /* overrideToPortrait= */ false,
-            kDefaultDeviceId,
+            ROTATION_OVERRIDE_NONE,
+            clientAttribution,
             /* devicePolicy= */ 0,
             &unstableDevice);
     if (!serviceRet.isOk()) {
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index 48d78e1..1ec5072 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -15,6 +15,7 @@
  */
 
 #define LOG_TAG "AidlUtils"
+//#define LOG_NDEBUG 0
 
 #include <aidl/AidlUtils.h>
 #include <aidl/ExtensionMetadataTags.h>
@@ -25,6 +26,7 @@
 #include <device3/Camera3StreamInterface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <mediautils/AImageReaderUtils.h>
+#include "utils/Utils.h"
 
 namespace android::hardware::cameraservice::utils::conversion::aidl {
 
@@ -314,6 +316,7 @@
     if (vndkVersion == __ANDROID_API_FUTURE__) {
         // VNDK version derived from ro.board.api_level is a version code-name that
         // corresponds to the current SDK version.
+        ALOGV("%s: VNDK version is API FUTURE, not filtering any keys", __FUNCTION__);
         return OK;
     }
     const auto &apiLevelToKeys =
@@ -322,9 +325,14 @@
     // versions above the given one, need to have their keys filtered from the
     // metadata in order to avoid metadata invalidation.
     auto it = apiLevelToKeys.upper_bound(vndkVersion);
+    ALOGV("%s: VNDK version for filtering is %d", __FUNCTION__, vndkVersion);
     while (it != apiLevelToKeys.end()) {
         for (const auto &key : it->second) {
             status_t res = metadata.erase(key);
+            // Should be okay to not use get_local_camera_metadata_tag_name
+            // since we're not filtering vendor tags
+            ALOGV("%s: Metadata key being filtered is %s", __FUNCTION__,
+                    get_camera_metadata_tag_name(key));
             if (res != OK) {
                 ALOGE("%s metadata key %d could not be erased", __FUNCTION__, key);
                 return res;
@@ -340,8 +348,8 @@
     // Ensure the vendor ID are the same before attempting
     // anything else. If vendor IDs differ we cannot safely copy the characteristics.
     if (from.getVendorId() != to->getVendorId()) {
-        ALOGE("%s: Incompatible CameraMetadata objects. Vendor IDs differ. From: %lu; To: %lu",
-              __FUNCTION__, from.getVendorId(), to->getVendorId());
+        ALOGE("%s: Incompatible CameraMetadata objects. Vendor IDs differ. From: %" PRIu64
+              "; To: %" PRIu64, __FUNCTION__, from.getVendorId(), to->getVendorId());
         return BAD_VALUE;
     }
 
@@ -357,7 +365,7 @@
     for (size_t i = 0; i < get_camera_metadata_entry_count(src); i++) {
         int ret = get_camera_metadata_ro_entry(src, i, &entry);
         if (ret != OK) {
-            ALOGE("%s: Could not fetch entry at index %lu. Error: %d", __FUNCTION__, i, ret);
+            ALOGE("%s: Could not fetch entry at index %zu. Error: %d", __FUNCTION__, i, ret);
             from.unlock(src);
             return BAD_VALUE;
         }
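
A note on the filtering walk above: filterVndkKeys keeps metadata keys grouped by the API level that introduced them, and upper_bound(vndkVersion) lands on the first group strictly newer than the client's VNDK version, so every group from that point onward is erased. A minimal standalone re-expression of that walk (sketch only, not part of the patch; the real code erases keys directly from a CameraMetadata object):

    #include <cstdint>
    #include <map>
    #include <vector>

    // Collect every key introduced at an API level strictly greater than vndkVersion.
    void collectKeysToFilter(int vndkVersion,
                             const std::map<int, std::vector<uint32_t>>& apiLevelToKeys,
                             std::vector<uint32_t>* keysToErase) {
        for (auto it = apiLevelToKeys.upper_bound(vndkVersion); it != apiLevelToKeys.end(); ++it) {
            keysToErase->insert(keysToErase->end(), it->second.begin(), it->second.end());
        }
    }
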
diff --git a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
index 86af36c..61b150d 100644
--- a/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/ExtensionMetadataTags.h
@@ -30,13 +30,4 @@
 std::vector<camera_metadata_tag> extension_metadata_keys{
             ANDROID_EXTENSION_STRENGTH,
             ANDROID_EXTENSION_CURRENT_TYPE,
-            ANDROID_EFV_PADDING_ZOOM_FACTOR,
-            ANDROID_EFV_AUTO_ZOOM,
-            ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
-            ANDROID_EFV_STABILIZATION_MODE,
-            ANDROID_EFV_TRANSLATE_VIEWPORT,
-            ANDROID_EFV_ROTATE_VIEWPORT,
-            ANDROID_EFV_PADDING_REGION,
-            ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
-            ANDROID_EFV_TARGET_COORDINATES,
 };
diff --git a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
index 0e1db5c..b07d8d5 100644
--- a/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
+++ b/services/camera/libcameraservice/aidl/VndkVersionMetadataTags.h
@@ -87,7 +87,6 @@
         } },
       {35, {
           ANDROID_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE,
-          ANDROID_EFV_PADDING_ZOOM_FACTOR_RANGE,
           ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
           ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
           ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
@@ -121,15 +120,6 @@
         }  },
       {35, {
           ANDROID_CONTROL_LOW_LIGHT_BOOST_STATE,
-          ANDROID_EFV_AUTO_ZOOM,
-          ANDROID_EFV_AUTO_ZOOM_PADDING_REGION,
-          ANDROID_EFV_MAX_PADDING_ZOOM_FACTOR,
-          ANDROID_EFV_PADDING_REGION,
-          ANDROID_EFV_PADDING_ZOOM_FACTOR,
-          ANDROID_EFV_ROTATE_VIEWPORT,
-          ANDROID_EFV_STABILIZATION_MODE,
-          ANDROID_EFV_TARGET_COORDINATES,
-          ANDROID_EFV_TRANSLATE_VIEWPORT,
           ANDROID_FLASH_STRENGTH_LEVEL,
           ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION,
           ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES,
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 8ec2e4f..861414f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -24,11 +24,12 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
+#include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
 #include <gui/Surface.h>
-#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 
 #include "api1/Camera2Client.h"
 
@@ -50,6 +51,8 @@
 namespace android {
 using namespace camera2;
 
+namespace flags = com::android::internal::camera::flags;
+
 // Interface used by CameraService
 
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
@@ -66,13 +69,13 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool forceSlowJpegMode):
         Camera2ClientBase(cameraService, cameraClient, cameraServiceProxyWrapper,
                 attributionAndPermissionUtils, clientPackageName,
                 false/*systemNativeClient - since no ndk for api1*/, clientFeatureId,
                 cameraDeviceId, api1CameraId, cameraFacing, sensorOrientation, clientPid,
-                clientUid, servicePid, overrideForPerfClass, overrideToPortrait,
+                clientUid, servicePid, overrideForPerfClass, rotationOverride,
                 /*legacyClient*/ true),
         mParameters(api1CameraId, cameraFacing),
         mLatestRequestIds(kMaxRequestIds),
@@ -420,7 +423,7 @@
         result << "    none\n";
     }
 
-    std::string resultStr = std::move(result.str());
+    std::string resultStr = result.str();
 
     write(fd, resultStr.c_str(), resultStr.size());
 
@@ -501,7 +504,16 @@
     bool hasDeviceError = mDevice->hasDeviceError();
     mDevice->disconnect();
 
-    CameraService::Client::disconnect();
+    if (flags::api1_release_binderlock_before_cameraservice_disconnect()) {
+        // CameraService::Client::disconnect calls into CameraService, which attempts to lock
+        // CameraService's mServiceLock. This might deadlock if CameraService is currently
+        // waiting to lock mSerializationLock on another thread.
+        mBinderSerializationLock.unlock();
+        CameraService::Client::disconnect();
+        mBinderSerializationLock.lock();
+    } else {
+        CameraService::Client::disconnect();
+    }
 
     int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
     mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
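
The unlock/relock added above is the standard fix for a lock-order inversion: this thread holds the client's binder serialization lock and is about to call into CameraService, which takes its own service-wide lock, while another thread may hold that service lock and wait on the serialization lock. A minimal sketch of the pattern with plain std::mutex (names are illustrative, not the actual members):

    #include <mutex>

    std::mutex gSerializationLock;  // stands in for the client's binder serialization lock
    std::mutex gServiceLock;        // stands in for the service-wide lock taken by the callee

    void callIntoService() {
        std::lock_guard<std::mutex> lock(gServiceLock);  // the callee acquires its own lock
        // ... service-side cleanup ...
    }

    void disconnectClient() {
        std::unique_lock<std::mutex> lock(gSerializationLock);
        // ... client-local teardown under the serialization lock ...
        lock.unlock();      // drop our lock before calling into the service
        callIntoService();  // safe: no thread waits on us while we wait on it
        lock.lock();        // re-acquire to finish the remaining teardown
    }
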
diff --git a/services/camera/libcameraservice/api1/Camera2Client.h b/services/camera/libcameraservice/api1/Camera2Client.h
index 2654a25..a0c9f2d 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.h
+++ b/services/camera/libcameraservice/api1/Camera2Client.h
@@ -114,7 +114,7 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait,
+            int rotationOverride,
             bool forceSlowJpegMode);
 
     virtual ~Camera2Client();
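
Throughout this patch the boolean overrideToPortrait parameter becomes an int rotationOverride carrying one of the ICameraService ROTATION_OVERRIDE_* constants seen elsewhere in the diff (ROTATION_OVERRIDE_NONE, ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT, ROTATION_OVERRIDE_ROTATION_ONLY). A hedged sketch of how a call site migrating from the old bool could map it (the helper below is hypothetical, not part of the patch):

    #include <android/hardware/ICameraService.h>

    // Hypothetical mapping from the legacy bool to the new rotation-override values.
    static int toRotationOverride(bool overrideToPortrait) {
        using android::hardware::ICameraService;
        return overrideToPortrait
                ? ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT
                : ICameraService::ROTATION_OVERRIDE_NONE;
    }
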
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 17db20b..2fbf49e 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -18,9 +18,10 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -113,6 +114,12 @@
     if (!mCallbackToApp && mCallbackConsumer == 0) {
         // Create CPU buffer queue endpoint, since app hasn't given us one
         // Make it async to avoid disconnect deadlocks
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mCallbackConsumer = new CpuConsumer(kCallbackHeapCount);
+        mCallbackConsumer->setFrameAvailableListener(this);
+        mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
+        mCallbackWindow = mCallbackConsumer->getSurface();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
@@ -120,6 +127,7 @@
         mCallbackConsumer->setFrameAvailableListener(this);
         mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
         mCallbackWindow = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
     if (mCallbackStreamId != NO_STREAM) {
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index eb00bf8..3a0489c 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -25,9 +25,10 @@
 
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -93,6 +94,12 @@
 
     if (mCaptureConsumer == 0) {
         // Create CPU buffer queue endpoint
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mCaptureConsumer = new CpuConsumer(1);
+        mCaptureConsumer->setFrameAvailableListener(this);
+        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
+        mCaptureWindow = mCaptureConsumer->getSurface();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
@@ -100,6 +107,7 @@
         mCaptureConsumer->setFrameAvailableListener(this);
         mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
         mCaptureWindow = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
     // Since ashmem heaps are rounded up to page size, don't reallocate if
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index d6c2415..0f1d0ff 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -27,10 +27,11 @@
 
 #include <inttypes.h>
 
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
-#include <camera/StringUtils.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -141,8 +142,12 @@
         mHasFocuser(false),
         mInputBuffer(nullptr),
         mProducer(nullptr),
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+        mInputSurface(nullptr),
+#else
         mInputProducer(nullptr),
         mInputProducerSlot(-1),
+#endif
         mBuffersToDetach(0) {
     // Initialize buffer queue and frame list based on pipeline max depth.
     size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
@@ -250,7 +255,11 @@
     if (mZslStreamId == NO_STREAM) {
         // Create stream for HAL production
         // TODO: Sort out better way to select resolution for ZSL
-
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mProducer = new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, mBufferQueueDepth);
+        mProducer->setName("Camera2-ZslRingBufferConsumer");
+        sp<Surface> outSurface = mProducer->getSurface();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
@@ -258,6 +267,7 @@
             mBufferQueueDepth);
         mProducer->setName("Camera2-ZslRingBufferConsumer");
         sp<Surface> outSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
             params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
@@ -325,10 +335,17 @@
         mInputStreamId = NO_STREAM;
     }
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    if (nullptr != mInputSurface.get()) {
+        // The surface destructor calls disconnect
+        mInputSurface.clear();
+    }
+#else
     if (nullptr != mInputProducer.get()) {
         mInputProducer->disconnect(NATIVE_WINDOW_API_CPU);
         mInputProducer.clear();
     }
+#endif
 
     return OK;
 }
@@ -387,11 +404,19 @@
 
 void ZslProcessor::doNotifyInputReleasedLocked() {
     assert(nullptr != mInputBuffer.get());
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    assert(nullptr != mInputSurface.get());
+#else
     assert(nullptr != mInputProducer.get());
+#endif
 
     sp<GraphicBuffer> gb;
     sp<Fence> fence;
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    auto rc = mInputSurface->detachNextBuffer(&gb, &fence);
+#else
     auto rc = mInputProducer->detachNextBuffer(&gb, &fence);
+#endif
     if (NO_ERROR != rc) {
         ALOGE("%s: Failed to detach buffer from input producer: %d",
             __FUNCTION__, rc);
@@ -450,9 +475,15 @@
             __FUNCTION__, (unsigned int) metadataIdx);
     }
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    if (nullptr == mInputSurface.get()) {
+        res = client->getCameraDevice()->getInputSurface(
+            &mInputSurface);
+#else
     if (nullptr == mInputProducer.get()) {
         res = client->getCameraDevice()->getInputBufferProducer(
             &mInputProducer);
+#endif
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to retrieve input producer: "
                     "%s (%d)", __FUNCTION__, client->getCameraId(),
@@ -460,9 +491,14 @@
             return res;
         }
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+        res = mInputSurface->connect(NATIVE_WINDOW_API_CPU, new InputProducerListener(this),
+            false);
+#else
         IGraphicBufferProducer::QueueBufferOutput output;
         res = mInputProducer->connect(new InputProducerListener(this),
             NATIVE_WINDOW_API_CPU, false, &output);
+#endif
         if (res != OK) {
             ALOGE("%s: Camera %d: Unable to connect to input producer: "
                     "%s (%d)", __FUNCTION__, client->getCameraId(),
@@ -623,19 +659,32 @@
     }
 
     BufferItem &item = mInputBuffer->getBufferItem();
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    auto rc = mInputSurface->attachBuffer(item.mGraphicBuffer->getNativeBuffer());
+#else
     auto rc = mInputProducer->attachBuffer(&mInputProducerSlot,
         item.mGraphicBuffer);
+#endif
     if (OK != rc) {
         ALOGE("%s: Failed to attach input ZSL buffer to producer: %d",
             __FUNCTION__, rc);
         return rc;
     }
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    mInputSurface->setBuffersTimestamp(item.mTimestamp);
+    mInputSurface->setBuffersDataSpace(static_cast<ui::Dataspace>(item.mDataSpace));
+    mInputSurface->setCrop(&item.mCrop);
+    mInputSurface->setScalingMode(item.mScalingMode);
+    mInputSurface->setBuffersTransform(item.mTransform);
+    rc = mInputSurface->queueBuffer(item.mGraphicBuffer, item.mFence);
+#else
     IGraphicBufferProducer::QueueBufferOutput output;
     IGraphicBufferProducer::QueueBufferInput input(item.mTimestamp,
             item.mIsAutoTimestamp, item.mDataSpace, item.mCrop,
             item.mScalingMode, item.mTransform, item.mFence);
     rc = mInputProducer->queueBuffer(mInputProducerSlot, input, &output);
+#endif
     if (OK != rc) {
         ALOGE("%s: Failed to queue ZSL buffer to producer: %d",
             __FUNCTION__, rc);
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.h b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
index 3186233..a98160a 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.h
@@ -24,8 +24,9 @@
 #include <utils/Condition.h>
 #include <gui/BufferItem.h>
 #include <gui/BufferItemConsumer.h>
-#include <gui/RingBufferConsumer.h>
+#include <gui/Flags.h>
 #include <gui/IProducerListener.h>
+#include <gui/RingBufferConsumer.h>
 #include <camera/CameraMetadata.h>
 
 #include "api1/client2/FrameProcessor.h"
@@ -83,6 +84,20 @@
 
   private:
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    class InputProducerListener : public SurfaceListener {
+    public:
+        InputProducerListener(wp<ZslProcessor> parent) : mParent(parent) {}
+        virtual void onBufferReleased() override;
+        virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /* buffers */)
+            override {}
+        virtual void onBufferDetached(int /* slot */) override {}
+        virtual bool needsReleaseNotify() override { return true; }
+
+    private:
+        wp<ZslProcessor> mParent;
+    };
+#else
     class InputProducerListener : public BnProducerListener {
     public:
         InputProducerListener(wp<ZslProcessor> parent) : mParent(parent) {}
@@ -92,6 +107,7 @@
     private:
         wp<ZslProcessor> mParent;
     };
+#endif
 
     static const nsecs_t kWaitDuration = 10000000; // 10 ms
     nsecs_t mLatestClearedBufferTimestamp;
@@ -139,8 +155,13 @@
     // Input buffer queued into HAL
     sp<RingBufferConsumer::PinnedBufferItem> mInputBuffer;
     sp<RingBufferConsumer>                   mProducer;
+
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    sp<Surface>                              mInputSurface;
+#else
     sp<IGraphicBufferProducer>               mInputProducer;
     int                                      mInputProducerSlot;
+#endif
 
     Condition                                mBuffersToDetachSignal;
     int                                      mBuffersToDetach;
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 105d04f..f469aad 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -71,7 +71,7 @@
         int clientPid,
         uid_t clientUid,
         int servicePid,
-        bool overrideToPortrait) :
+        int rotationOverride) :
     BasicClient(cameraService,
             IInterface::asBinder(remoteCallback),
             attributionAndPermissionUtils,
@@ -84,7 +84,7 @@
             clientPid,
             clientUid,
             servicePid,
-            overrideToPortrait),
+            rotationOverride),
     mRemoteCallback(remoteCallback) {
 }
 
@@ -104,13 +104,13 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait,
+        int rotationOverride,
         const std::string& originalCameraId) :
     Camera2ClientBase(cameraService, remoteCallback, cameraServiceProxyWrapper,
             attributionAndPermissionUtils, clientPackageName,
             systemNativeClient, clientFeatureId, cameraId, /*API1 camera ID*/ -1, cameraFacing,
             sensorOrientation, clientPid, clientUid, servicePid, overrideForPerfClass,
-            overrideToPortrait),
+            rotationOverride),
     mInputStream(),
     mStreamingRequestId(REQUEST_ID_NONE),
     mRequestIdCounter(0),
@@ -295,7 +295,7 @@
     }
 
     List<const CameraDeviceBase::PhysicalCameraSettingsList> metadataRequestList;
-    std::list<const SurfaceMap> surfaceMapList;
+    std::list<SurfaceMap> surfaceMapList;
     submitInfo->mRequestId = mRequestIdCounter;
     uint32_t loopCounter = 0;
 
@@ -546,14 +546,14 @@
             mRunningSessionStats.mVideoStabilizationMode = entry.data.u8[0];
         }
 
-        if (!mRunningSessionStats.mUsedUltraWide && flags::log_ultrawide_usage()) {
+        if (!mRunningSessionStats.mUsedUltraWide) {
             entry = physicalSettingsList.begin()->metadata.find(
                     ANDROID_CONTROL_ZOOM_RATIO);
             if (entry.count == 1 && entry.data.f[0] < 1.0f ) {
                 mRunningSessionStats.mUsedUltraWide = true;
             }
         }
-        if (!mRunningSessionStats.mUsedSettingsOverrideZoom && flags::log_zoom_override_usage()) {
+        if (!mRunningSessionStats.mUsedSettingsOverrideZoom) {
             entry = physicalSettingsList.begin()->metadata.find(
                     ANDROID_CONTROL_SETTINGS_OVERRIDE);
             if (entry.count == 1 && entry.data.i32[0] ==
@@ -1240,6 +1240,18 @@
     if (!mDevice.get()) {
         return STATUS_ERROR(CameraService::ERROR_DISCONNECTED, "Camera device no longer alive");
     }
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    sp<Surface> surface;
+    status_t err = mDevice->getInputSurface(&surface);
+    if (err != OK) {
+        res = STATUS_ERROR_FMT(CameraService::ERROR_INVALID_OPERATION,
+                "Camera %s: Error getting input Surface: %s (%d)",
+                mCameraIdStr.c_str(), strerror(-err), err);
+    } else {
+        inputSurface->name = toString16("CameraInput");
+        inputSurface->graphicBufferProducer = surface->getIGraphicBufferProducer();
+    }
+#else
     sp<IGraphicBufferProducer> producer;
     status_t err = mDevice->getInputBufferProducer(&producer);
     if (err != OK) {
@@ -1250,6 +1262,7 @@
         inputSurface->name = toString16("CameraInput");
         inputSurface->graphicBufferProducer = producer;
     }
+#endif
     return res;
 }
 
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.h b/services/camera/libcameraservice/api2/CameraDeviceClient.h
index 505c086..42f2752 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.h
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.h
@@ -61,7 +61,7 @@
             int clientPid,
             uid_t clientUid,
             int servicePid,
-            bool overrideToPortrait);
+            int rotationOverride);
 
     sp<hardware::camera2::ICameraDeviceCallbacks> mRemoteCallback;
 };
@@ -193,7 +193,7 @@
             uid_t clientUid,
             int servicePid,
             bool overrideForPerfClass,
-            bool overrideToPortrait,
+            int rotationOverride,
             const std::string& originalCameraId);
     virtual ~CameraDeviceClient();
 
diff --git a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
index 82c3d6d..77de874 100644
--- a/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
+++ b/services/camera/libcameraservice/api2/CameraOfflineSessionClient.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERAOFFLINESESSIONCLIENT_H
 #define ANDROID_SERVERS_CAMERA_PHOTOGRAPHY_CAMERAOFFLINESESSIONCLIENT_H
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/BnCameraOfflineSession.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include "common/FrameProcessorBase.h"
@@ -59,7 +60,7 @@
                     // (v)ndk doesn't have offline session support
                     clientPackageName, /*overridePackageName*/false, clientFeatureId,
                     cameraIdStr, cameraFacing, sensorOrientation, clientPid, clientUid, servicePid,
-                    /*overrideToPortrait*/false),
+                    hardware::ICameraService::ROTATION_OVERRIDE_NONE),
             mRemoteCallback(remoteCallback), mOfflineSession(session),
             mCompositeStreamMap(offlineCompositeStreamMap) {}
 
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 1b7fc6e..fa569ce 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -23,7 +23,7 @@
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <camera/CameraMetadata.h>
 #include <camera/camera2/OutputConfiguration.h>
-#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
 #include "common/CameraDeviceBase.h"
 #include "device3/Camera3StreamInterface.h"
 
@@ -96,9 +96,12 @@
             const CameraMetadata& settings) override;
 
 protected:
-    struct ProducerListener : public BnProducerListener {
-        // ProducerListener impementation
+    struct StreamSurfaceListener : public SurfaceListener {
+        // StreamSurfaceListener implementation
         void onBufferReleased() override { /*No impl. for now*/ };
+        bool needsReleaseNotify() override { return true; };
+        void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /*buffers*/) override {};
+        void onBufferDetached(int /*slot*/) override {};
     };
 
     status_t registerCompositeStreamListener(int32_t streamId);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 1bd0b85..244a1e5 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -22,13 +22,15 @@
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
 #include <camera/StringUtils.h>
 
-#include "api1/client2/JpegProcessor.h"
-#include "common/CameraProviderManager.h"
-#include "utils/SessionConfigurationUtils.h"
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include "api1/client2/JpegProcessor.h"
+#include "common/CameraProviderManager.h"
+#include "utils/SessionConfigurationUtils.h"
+
 #include "DepthCompositeStream.h"
 
 namespace android {
@@ -48,7 +50,7 @@
         mBlobHeight(0),
         mDepthBufferAcquired(false),
         mBlobBufferAcquired(false),
-        mProducerListener(new ProducerListener()),
+        mStreamSurfaceListener(new StreamSurfaceListener()),
         mMaxJpegBufferSize(-1),
         mUHRMaxJpegBufferSize(-1),
         mIsLogicalCamera(false) {
@@ -517,6 +519,15 @@
     return false;
 }
 
+bool DepthCompositeStream::isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_DYNAMIC_DEPTH)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB)) {
+        return true;
+    }
+
+    return false;
+}
+
 static bool setContains(std::unordered_set<int32_t> containerSet, int32_t value) {
     return containerSet.find(value) != containerSet.end();
 }
@@ -605,6 +616,12 @@
         return NO_INIT;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    mBlobConsumer->setFrameAvailableListener(this);
+    mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
+    mBlobSurface = mBlobConsumer->getSurface();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
@@ -612,6 +629,7 @@
     mBlobConsumer->setFrameAvailableListener(this);
     mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
     mBlobSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
             id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
@@ -630,11 +648,18 @@
         return ret;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mDepthConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    mDepthConsumer->setFrameAvailableListener(this);
+    mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
+    mDepthSurface = mDepthConsumer->getSurface();
+#else
     BufferQueue::createBufferQueue(&producer, &consumer);
     mDepthConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
     mDepthConsumer->setFrameAvailableListener(this);
     mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
     mDepthSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     std::vector<int> depthSurfaceId;
     ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
             kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
@@ -681,7 +706,7 @@
         return NO_INIT;
     }
 
-    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mBlobStreamId);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index f797f9c..75deef7 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -46,6 +46,7 @@
     ~DepthCompositeStream() override;
 
     static bool isDepthCompositeStream(const sp<Surface> &surface);
+    static bool isDepthCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
@@ -129,12 +130,12 @@
     static const auto kDepthMapDataSpace = HAL_DATASPACE_DEPTH;
     static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
 
-    int                  mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
-    size_t               mBlobWidth, mBlobHeight;
-    sp<CpuConsumer>      mBlobConsumer, mDepthConsumer;
-    bool                 mDepthBufferAcquired, mBlobBufferAcquired;
-    sp<Surface>          mDepthSurface, mBlobSurface, mOutputSurface;
-    sp<ProducerListener> mProducerListener;
+    int                         mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
+    size_t                      mBlobWidth, mBlobHeight;
+    sp<CpuConsumer>             mBlobConsumer, mDepthConsumer;
+    bool                        mDepthBufferAcquired, mBlobBufferAcquired;
+    sp<Surface>                 mDepthSurface, mBlobSurface, mOutputSurface;
+    sp<StreamSurfaceListener>   mStreamSurfaceListener;
 
     ssize_t              mMaxJpegBufferSize;
     ssize_t              mUHRMaxJpegBufferSize;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 68e9ad4..3af673b 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -25,11 +25,12 @@
 
 #include <aidl/android/hardware/camera/device/CameraBlob.h>
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
-#include <libyuv.h>
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
+#include <libyuv.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <camera/StringUtils.h>
 
 #include <mediadrm/ICrypto.h>
 #include <media/MediaCodecBuffer.h>
@@ -40,6 +41,7 @@
 #include "common/CameraDeviceBase.h"
 #include "utils/ExifUtils.h"
 #include "utils/SessionConfigurationUtils.h"
+#include "utils/Utils.h"
 #include "HeicEncoderInfoManager.h"
 #include "HeicCompositeStream.h"
 
@@ -67,7 +69,7 @@
         mMainImageStreamId(-1),
         mMainImageSurfaceId(-1),
         mYuvBufferAcquired(false),
-        mProducerListener(new ProducerListener()),
+        mStreamSurfaceListener(new StreamSurfaceListener()),
         mDequeuedOutputBufferCnt(0),
         mCodecOutputCounter(0),
         mQuality(-1),
@@ -94,6 +96,11 @@
     mMainImageSurface.clear();
 }
 
+bool HeicCompositeStream::isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    return ((streamInfo.dataSpace == static_cast<android_dataspace_t>(HAL_DATASPACE_HEIF)) &&
+            (streamInfo.format == HAL_PIXEL_FORMAT_BLOB));
+}
+
 bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
     ANativeWindow *anw = surface.get();
     status_t err;
@@ -136,6 +143,13 @@
         return NO_INIT;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
+    mAppSegmentConsumer->setFrameAvailableListener(this);
+    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+    mAppSegmentSurface = mAppSegmentConsumer->getSurface();
+    sp<IGraphicBufferProducer> producer = mAppSegmentSurface->getIGraphicBufferProducer();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
@@ -143,6 +157,7 @@
     mAppSegmentConsumer->setFrameAvailableListener(this);
     mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
     mAppSegmentSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     mStaticInfo = device->info();
 
@@ -172,8 +187,13 @@
             return res;
         }
     } else {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mMainImageConsumer = new CpuConsumer(1);
+        producer = mMainImageConsumer->getSurface()->getIGraphicBufferProducer();
+#else
         BufferQueue::createBufferQueue(&producer, &consumer);
         mMainImageConsumer = new CpuConsumer(consumer, 1);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         mMainImageConsumer->setFrameAvailableListener(this);
         mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
     }
@@ -507,7 +527,7 @@
         return NO_INIT;
     }
 
-    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mMainImageStreamId);
@@ -1291,7 +1311,9 @@
         if (firstPendingFrame != mPendingInputFrames.end()) {
             updateCodecQualityLocked(firstPendingFrame->second.quality);
         } else {
-            markTrackerIdle();
+            if (mSettingsByFrameNumber.size() == 0) {
+                markTrackerIdle();
+            }
         }
     }
 }
@@ -1459,7 +1481,7 @@
     const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(CameraBlob));
     const CameraBlob *blob = (const CameraBlob*)(header);
     if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
-        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, blob->blobId);
+        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, eToI(blob->blobId));
         return 0;
     }
 
@@ -1583,7 +1605,7 @@
         // The chrome plane could be either Cb first, or Cr first. Take the
         // smaller address.
         uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
-        MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
+        MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
         for (auto row = top/2; row < (top+height)/2; row++) {
             uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                     imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
@@ -1717,7 +1739,9 @@
                     // removed, they are simply skipped.
                     mPendingInputFrames.erase(failingFrameNumber);
                     if (mPendingInputFrames.size() == 0) {
-                        markTrackerIdle();
+                        if (mSettingsByFrameNumber.size() == 0) {
+                            markTrackerIdle();
+                        }
                     }
                     return true;
                 }
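
Both hunks above converge on the same guard: the availability tracker is only marked idle once mPendingInputFrames is empty and mSettingsByFrameNumber no longer holds entries for uncompleted requests, presumably so the tracker cannot report idle in the window between a request's settings arriving and its buffers arriving. Expressed as a standalone predicate (sketch only, with the containers reduced to their sizes):

    // Sketch only: the idle condition shared by the two call sites above.
    static bool canMarkTrackerIdle(size_t pendingInputFrames, size_t pendingSettings) {
        return (pendingInputFrames == 0) && (pendingSettings == 0);
    }
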
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index b539cdd..ba10e05 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -19,7 +19,6 @@
 
 #include <queue>
 
-#include <gui/IProducerListener.h>
 #include <gui/CpuConsumer.h>
 
 #include <media/hardware/VideoAPI.h>
@@ -42,6 +41,7 @@
     ~HeicCompositeStream() override;
 
     static bool isHeicCompositeStream(const sp<Surface> &surface);
+    static bool isHeicCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
             bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
@@ -233,10 +233,10 @@
     bool              mYuvBufferAcquired; // Only applicable to HEVC codec
     std::queue<int64_t> mMainImageFrameNumbers;
 
-    static const int32_t kMaxOutputSurfaceProducerCount = 1;
-    sp<Surface>       mOutputSurface;
-    sp<ProducerListener> mProducerListener;
-    int32_t           mDequeuedOutputBufferCnt;
+    static const int32_t        kMaxOutputSurfaceProducerCount = 1;
+    sp<Surface>                 mOutputSurface;
+    sp<StreamSurfaceListener>   mStreamSurfaceListener;
+    int32_t                     mDequeuedOutputBufferCnt;
 
     // Map from frame number to JPEG setting of orientation+quality
     struct HeicSettings {
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index a1b9383..c5bd7a9 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -14,9 +14,6 @@
  * limitations under the License.
  */
 
-#include "hardware/gralloc.h"
-#include "system/graphics-base-v1.0.h"
-#include "system/graphics-base-v1.1.h"
 #define LOG_TAG "Camera3-JpegRCompositeStream"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
@@ -25,11 +22,16 @@
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
 
 #include "common/CameraProviderManager.h"
+#include "utils/SessionConfigurationUtils.h"
+
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
+#include <hardware/gralloc.h>
+#include <system/graphics-base-v1.0.h>
+#include <system/graphics-base-v1.1.h>
 #include <ultrahdr/jpegr.h>
 #include <utils/ExifUtils.h>
 #include <utils/Log.h>
-#include "utils/SessionConfigurationUtils.h"
 #include <utils/Trace.h>
 
 #include "JpegRCompositeStream.h"
@@ -54,7 +56,7 @@
         mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
         mOutputStreamUseCase(0),
         mFirstRequestLatency(-1),
-        mProducerListener(new ProducerListener()),
+        mStreamSurfaceListener(new StreamSurfaceListener()),
         mMaxJpegBufferSize(-1),
         mUHRMaxJpegBufferSize(-1),
         mStaticInfo(device->info()) {
@@ -520,6 +522,15 @@
     return false;
 }
 
+bool JpegRCompositeStream::isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo) {
+    if ((streamInfo.format == HAL_PIXEL_FORMAT_BLOB) &&
+            (streamInfo.dataSpace == static_cast<int>(kJpegRDataSpace))) {
+        return true;
+    }
+
+    return false;
+}
+
 void JpegRCompositeStream::deriveDynamicRangeAndDataspace(int64_t dynamicProfile,
         int64_t* /*out*/dynamicRange, int64_t* /*out*/dataSpace) {
     if ((dynamicRange == nullptr) || (dataSpace == nullptr)) {
@@ -564,6 +575,12 @@
             mStaticInfo, mP010DynamicRange,
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mP010Consumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    mP010Consumer->setFrameAvailableListener(this);
+    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
+    mP010Surface = mP010Consumer->getSurface();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
@@ -571,6 +588,7 @@
     mP010Consumer->setFrameAvailableListener(this);
     mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
     mP010Surface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
             static_cast<android_dataspace>(mP010DataSpace), rotation,
@@ -588,11 +606,18 @@
     }
 
     if (mSupportInternalJpeg) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+        mBlobConsumer->setFrameAvailableListener(this);
+        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
+        mBlobSurface = mBlobConsumer->getSurface();
+#else
         BufferQueue::createBufferQueue(&producer, &consumer);
         mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
         mBlobConsumer->setFrameAvailableListener(this);
         mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
         mBlobSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         std::vector<int> blobSurfaceId;
         ret = device->createStream(mBlobSurface, width, height, format,
                 kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
@@ -644,7 +669,7 @@
         return NO_INIT;
     }
 
-    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mP010StreamId);
@@ -832,8 +857,8 @@
     (*compositeOutput)[0].colorSpace =
         ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED;
 
-    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(staticInfo,
-                streamInfo.dynamicRangeProfile,
+    if (CameraProviderManager::isConcurrentDynamicRangeCaptureSupported(
+                staticInfo, dynamicRange,
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD)) {
         compositeOutput->push_back({});
         (*compositeOutput)[1].width = streamInfo.width;
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 016d57c..d3ab19c 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -43,6 +43,7 @@
     ~JpegRCompositeStream() override;
 
     static bool isJpegRCompositeStream(const sp<Surface> &surface);
+    static bool isJpegRCompositeStreamInfo(const OutputStreamInfo& streamInfo);
 
     // CompositeStream overrides
     status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
@@ -127,7 +128,8 @@
     int32_t              mOutputColorSpace;
     int64_t              mOutputStreamUseCase;
     nsecs_t              mFirstRequestLatency;
-    sp<ProducerListener> mProducerListener;
+
+    sp<StreamSurfaceListener> mStreamSurfaceListener;
 
     ssize_t              mMaxJpegBufferSize;
     ssize_t              mUHRMaxJpegBufferSize;
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.cpp b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
index 2239c9f..18069fe 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.cpp
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.cpp
@@ -27,8 +27,10 @@
 #include <gui/Surface.h>
 #include <gui/Surface.h>
 
+#include <android/hardware/ICameraService.h>
 #include <camera/CameraSessionStats.h>
 #include <camera/StringUtils.h>
+#include <com_android_window_flags.h>
 
 #include "common/Camera2ClientBase.h"
 
@@ -42,6 +44,8 @@
 
 using namespace camera2;
 
+namespace wm_flags = com::android::window::flags;
+
 // Interface used by CameraService
 
 template <typename TClientBase>
@@ -61,11 +65,11 @@
         uid_t clientUid,
         int servicePid,
         bool overrideForPerfClass,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool legacyClient):
         TClientBase(cameraService, remoteCallback, attributionAndPermissionUtils, clientPackageName,
                 systemNativeClient, clientFeatureId, cameraId, api1CameraId, cameraFacing,
-                sensorOrientation, clientPid, clientUid, servicePid, overrideToPortrait),
+                sensorOrientation, clientPid, clientUid, servicePid, rotationOverride),
         mSharedCameraCallbacks(remoteCallback),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
         mDeviceActive(false), mApi1CameraId(api1CameraId)
@@ -117,14 +121,14 @@
                     new HidlCamera3Device(mCameraServiceProxyWrapper,
                             TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            TClientBase::mOverrideToPortrait, mLegacyClient);
+                            TClientBase::mRotationOverride, mLegacyClient);
             break;
         case IPCTransport::AIDL:
             mDevice =
                     new AidlCamera3Device(mCameraServiceProxyWrapper,
                             TClientBase::mAttributionAndPermissionUtils,
                             TClientBase::mCameraIdStr, mOverrideForPerfClass,
-                            TClientBase::mOverrideToPortrait, mLegacyClient);
+                            TClientBase::mRotationOverride, mLegacyClient);
              break;
         default:
             ALOGE("%s Invalid transport for camera id %s", __FUNCTION__,
@@ -339,8 +343,9 @@
 
 template <typename TClientBase>
 void Camera2ClientBase<TClientBase>::notifyPhysicalCameraChange(const std::string &physicalId) {
-    // We're only interested in this notification if overrideToPortrait is turned on.
-    if (!TClientBase::mOverrideToPortrait) {
+    using android::hardware::ICameraService;
+    // We're only interested in this notification if a rotation override is in effect.
+    if (TClientBase::mRotationOverride == ICameraService::ROTATION_OVERRIDE_NONE) {
         return;
     }
 
@@ -350,8 +355,13 @@
     if (orientationEntry.count == 1) {
         int orientation = orientationEntry.data.i32[0];
         int rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_NONE;
-
-        if (orientation == 0 || orientation == 180) {
+        bool landscapeSensor = (orientation == 0 || orientation == 180);
+        if (((TClientBase::mRotationOverride ==
+                ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) && landscapeSensor) ||
+                        ((wm_flags::enable_camera_compat_for_desktop_windowing() &&
+                                TClientBase::mRotationOverride ==
+                                ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY)
+                                && !landscapeSensor)) {
             rotateAndCropMode = ANDROID_SCALER_ROTATE_AND_CROP_90;
         }
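
Spelled out, the branch above applies ROTATE_AND_CROP_90 when the portrait override is active on a landscape-mounted sensor (orientation 0 or 180), or, behind the desktop-windowing flag, when the rotation-only override is active on a portrait-mounted one. The same decision as a standalone predicate (sketch only; wmFlagEnabled stands in for wm_flags::enable_camera_compat_for_desktop_windowing()):

    #include <android/hardware/ICameraService.h>

    static bool shouldRotateAndCrop90(int rotationOverride, int sensorOrientation,
                                      bool wmFlagEnabled) {
        using android::hardware::ICameraService;
        const bool landscapeSensor = (sensorOrientation == 0 || sensorOrientation == 180);
        if (rotationOverride == ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
            return landscapeSensor;
        }
        if (wmFlagEnabled && rotationOverride == ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY) {
            return !landscapeSensor;
        }
        return false;
    }
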
 
diff --git a/services/camera/libcameraservice/common/Camera2ClientBase.h b/services/camera/libcameraservice/common/Camera2ClientBase.h
index c24f92b..c9d5735 100644
--- a/services/camera/libcameraservice/common/Camera2ClientBase.h
+++ b/services/camera/libcameraservice/common/Camera2ClientBase.h
@@ -64,7 +64,7 @@
                       uid_t clientUid,
                       int servicePid,
                       bool overrideForPerfClass,
-                      bool overrideToPortrait,
+                      int rotationOverride,
                       bool legacyClient = false);
     virtual ~Camera2ClientBase();
 
diff --git a/services/camera/libcameraservice/common/CameraDeviceBase.h b/services/camera/libcameraservice/common/CameraDeviceBase.h
index cfc41c3..9c8f5ad 100644
--- a/services/camera/libcameraservice/common/CameraDeviceBase.h
+++ b/services/camera/libcameraservice/common/CameraDeviceBase.h
@@ -25,6 +25,7 @@
 #include <utils/KeyedVector.h>
 #include <utils/Timers.h>
 #include <utils/List.h>
+#include <gui/Flags.h>
 
 #include "hardware/camera2.h"
 #include "camera/CameraMetadata.h"
@@ -136,7 +137,7 @@
      * Output lastFrameNumber is the expected last frame number of the list of requests.
      */
     virtual status_t captureList(const List<const PhysicalCameraSettingsList> &requests,
-                                 const std::list<const SurfaceMap> &surfaceMaps,
+                                 const std::list<SurfaceMap> &surfaceMaps,
                                  int64_t *lastFrameNumber = NULL) = 0;
 
     /**
@@ -152,7 +153,7 @@
      * Output lastFrameNumber is the last frame number of the previous streaming request.
      */
     virtual status_t setStreamingRequestList(const List<const PhysicalCameraSettingsList> &requests,
-                                             const std::list<const SurfaceMap> &surfaceMaps,
+                                             const std::list<SurfaceMap> &surfaceMaps,
                                              int64_t *lastFrameNumber = NULL) = 0;
 
     /**
@@ -305,9 +306,14 @@
      */
     virtual void getOfflineStreamIds(std::vector<int> *offlineStreamIds) = 0;
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    // get the surface of the input stream
+    virtual status_t getInputSurface(sp<Surface> *surface) = 0;
+#else
     // get the buffer producer of the input stream
     virtual status_t getInputBufferProducer(
             sp<IGraphicBufferProducer> *producer) = 0;
+#endif
 
     /**
      * Create a metadata buffer with fields that the HAL device believes are
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 45c3a1f..a03d199 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -34,10 +34,12 @@
 #include <inttypes.h>
 #include <android_companion_virtualdevice_flags.h>
 #include <android_companion_virtualdevice_build_flags.h>
+#include <android/binder_libbinder.h>
 #include <android/binder_manager.h>
 #include <android/hidl/manager/1.2/IServiceManager.h>
 #include <hidl/ServiceManagement.h>
 #include <com_android_internal_camera_flags.h>
+#include <com_android_window_flags.h>
 #include <functional>
 #include <camera_metadata_hidden.h>
 #include <android-base/parseint.h>
@@ -50,6 +52,7 @@
 
 #include "api2/HeicCompositeStream.h"
 #include "device3/ZoomRatioMapper.h"
+#include "utils/Utils.h"
 
 namespace android {
 
@@ -62,6 +65,7 @@
 
 namespace flags = com::android::internal::camera::flags;
 namespace vd_flags = android::companion::virtualdevice::flags;
+namespace wm_flags = com::android::window::flags;
 
 namespace {
 const bool kEnableLazyHal(property_get_bool("ro.camera.enableLazyHal", false));
@@ -90,7 +94,7 @@
         case TorchModeStatus::AVAILABLE_ON:
             return "AVAILABLE_ON";
     }
-    ALOGW("Unexpected HAL torch mode status code %d", s);
+    ALOGW("Unexpected HAL torch mode status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
@@ -103,7 +107,7 @@
         case CameraDeviceStatus::ENUMERATING:
             return "ENUMERATING";
     }
-    ALOGW("Unexpected HAL device status code %d", s);
+    ALOGW("Unexpected HAL device status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
@@ -145,11 +149,7 @@
     using aidl::android::hardware::camera::provider::ICameraProvider;
 
     AIBinder* binder = nullptr;
-    if (flags::lazy_aidl_wait_for_service()) {
-        binder = AServiceManager_waitForService(serviceName.c_str());
-    } else {
-        binder = AServiceManager_checkService(serviceName.c_str());
-    }
+    binder = AServiceManager_waitForService(serviceName.c_str());
 
     if (binder == nullptr) {
         ALOGE("%s: AIDL Camera provider HAL '%s' is not actually available, despite waiting "
@@ -403,13 +403,14 @@
 }
 
 status_t CameraProviderManager::getCameraInfo(const std::string &id,
-        bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const {
+         int rotationOverride, int *portraitRotation,
+         hardware::CameraInfo* info) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
 
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) return NAME_NOT_FOUND;
 
-    return deviceInfo->getCameraInfo(overrideToPortrait, portraitRotation, info);
+    return deviceInfo->getCameraInfo(rotationOverride, portraitRotation, info);
 }
 
 status_t CameraProviderManager::isSessionConfigurationSupported(const std::string& id,
@@ -425,7 +426,8 @@
             bool overrideForPerfClass) {
         CameraMetadata metadata;
         this->getCameraCharacteristicsLocked(id, overrideForPerfClass,
-                                             &metadata, /*overrideToPortrait*/false);
+                                             &metadata,
+                                             hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         return metadata;
     };
     return deviceInfo->isSessionConfigurationSupported(configuration,
@@ -464,22 +466,18 @@
 
 status_t CameraProviderManager::getSessionCharacteristics(
         const std::string& id, const SessionConfiguration& configuration, bool overrideForPerfClass,
-        bool overrideToPortrait, CameraMetadata* sessionCharacteristics /*out*/) const {
-    if (!flags::feature_combination_query()) {
-        return INVALID_OPERATION;
-    }
-
+        int rotationOverride, CameraMetadata* sessionCharacteristics /*out*/) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo == nullptr) {
         return NAME_NOT_FOUND;
     }
 
-    metadataGetter getMetadata = [this, overrideToPortrait](const std::string& id,
+    metadataGetter getMetadata = [this, rotationOverride](const std::string& id,
                                                             bool overrideForPerfClass) {
         CameraMetadata metadata;
         status_t ret = this->getCameraCharacteristicsLocked(id, overrideForPerfClass, &metadata,
-                                                            overrideToPortrait);
+                                                            rotationOverride);
         if (ret != OK) {
             ALOGE("%s: Could not get CameraCharacteristics for device %s", __FUNCTION__,
                   id.c_str());
@@ -508,10 +506,10 @@
 
 status_t CameraProviderManager::getCameraCharacteristics(const std::string &id,
         bool overrideForPerfClass, CameraMetadata* characteristics,
-        bool overrideToPortrait) const {
+        int rotationOverride) const {
     std::lock_guard<std::mutex> lock(mInterfaceMutex);
     return getCameraCharacteristicsLocked(id, overrideForPerfClass, characteristics,
-            overrideToPortrait);
+            rotationOverride);
 }
 
 status_t CameraProviderManager::getHighestSupportedVersion(const std::string &id,
@@ -2114,14 +2112,8 @@
         const std::string& providerName, const sp<ProviderInfo>& providerInfo) {
     using aidl::android::hardware::camera::provider::ICameraProvider;
 
-    std::shared_ptr<ICameraProvider> interface;
-    if (flags::delay_lazy_hal_instantiation()) {
-        // Only get remote instance if already running. Lazy Providers will be
-        // woken up later.
-        interface = mAidlServiceProxy->tryGetService(providerName);
-    } else {
-        interface = mAidlServiceProxy->getService(providerName);
-    }
+    // Only get remote instance if already running. Lazy Providers will be woken up later.
+    std::shared_ptr<ICameraProvider> interface = mAidlServiceProxy->tryGetService(providerName);
 
     if (interface == nullptr) {
         ALOGW("%s: AIDL Camera provider HAL '%s' is not actually available", __FUNCTION__,
@@ -2130,7 +2122,19 @@
     }
 
     AidlProviderInfo *aidlProviderInfo = static_cast<AidlProviderInfo *>(providerInfo.get());
-    return aidlProviderInfo->initializeAidlProvider(interface, mDeviceState);
+    status_t res = aidlProviderInfo->initializeAidlProvider(interface, mDeviceState);
+
+    if (flags::enable_hal_abort_from_cameraservicewatchdog()) {
+        pid_t pid = 0;
+
+        if (AIBinder_toPlatformBinder(interface->asBinder().get())->getDebugPid(&pid) == OK
+                && res == OK) {
+            std::lock_guard<std::mutex> lock(mProviderPidMapLock);
+            mProviderPidMap[providerInfo->mProviderInstance] = pid;
+        }
+    }
+
+    return res;
 }
 
 status_t CameraProviderManager::tryToInitializeHidlProviderLocked(
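
When enable_hal_abort_from_cameraservicewatchdog is on, the provider's pid is captured at initialization and stored in mProviderPidMap under mProviderPidMapLock, so the watchdog can later identify the HAL process. A hedged sketch of the AIDL-side pid lookup (error handling trimmed, helper name illustrative):

    #include <android/binder_libbinder.h>   // AIBinder_toPlatformBinder
    #include <binder/IBinder.h>
    #include <utils/Errors.h>
    #include <utils/StrongPointer.h>

    // Hypothetical helper: queries the pid of the process hosting an AIDL
    // camera provider. Returns -1 if the binder or the debug query fails.
    static pid_t queryProviderPid(AIBinder* providerBinder) {
        android::sp<android::IBinder> platformBinder =
                AIBinder_toPlatformBinder(providerBinder);
        pid_t pid = -1;
        if (platformBinder == nullptr || platformBinder->getDebugPid(&pid) != android::OK) {
            return -1;
        }
        return pid;
    }
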
@@ -2147,7 +2151,23 @@
     }
 
     HidlProviderInfo *hidlProviderInfo = static_cast<HidlProviderInfo *>(providerInfo.get());
-    return hidlProviderInfo->initializeHidlProvider(interface, mDeviceState);
+    status_t res = hidlProviderInfo->initializeHidlProvider(interface, mDeviceState);
+
+    if (flags::enable_hal_abort_from_cameraservicewatchdog()) {
+        pid_t pid = 0;
+
+        auto ret = interface->getDebugInfo([&pid](
+                const ::android::hidl::base::V1_0::DebugInfo& info) {
+            pid = info.pid;
+        });
+
+        if (ret.isOk() && res == OK) {
+            std::lock_guard<std::mutex> lock(mProviderPidMapLock);
+            mProviderPidMap[providerInfo->mProviderInstance] = pid;
+        }
+    }
+
+    return res;
 }
 
 status_t CameraProviderManager::addAidlProviderLocked(const std::string& newProvider) {
@@ -2158,14 +2178,11 @@
     bool preexisting =
             (mAidlProviderWithBinders.find(newProvider) != mAidlProviderWithBinders.end());
     using aidl::android::hardware::camera::provider::ICameraProvider;
-    std::string providerNameUsed  =
-            newProvider.substr(std::string(ICameraProvider::descriptor).size() + 1);
-    if (flags::lazy_aidl_wait_for_service()) {
-        // 'newProvider' has the fully qualified name of the provider service in case of AIDL.
-        // ProviderInfo::mProviderName also has the fully qualified name - so we just compare them
-        // here.
-        providerNameUsed = newProvider;
-    }
+
+    // 'newProvider' has the fully qualified name of the provider service in case of AIDL.
+    // ProviderInfo::mProviderName also has the fully qualified name - so we just compare them
+    // here.
+    std::string providerNameUsed = newProvider;
 
     for (const auto& providerInfo : mProviders) {
         if (providerInfo->mProviderName == providerNameUsed) {
@@ -2259,23 +2276,26 @@
         ALOGW("%s: Camera provider HAL with name '%s' is not registered", __FUNCTION__,
                 provider.c_str());
     } else {
+        if (flags::enable_hal_abort_from_cameraservicewatchdog()) {
+            {
+                std::lock_guard<std::mutex> pidLock(mProviderPidMapLock);
+                mProviderPidMap.erase(provider);
+            }
+        }
+
         // Check if there are any newer camera instances from the same provider and try to
         // initialize.
         for (const auto& providerInfo : mProviders) {
             if (providerInfo->mProviderName == removedProviderName) {
                 IPCTransport providerTransport = providerInfo->getIPCTransport();
-                std::string removedAidlProviderName = getFullAidlProviderName(removedProviderName);
-                if (flags::lazy_aidl_wait_for_service()) {
-                    removedAidlProviderName = removedProviderName;
-                }
                 switch(providerTransport) {
                     case IPCTransport::HIDL:
                         return tryToInitializeHidlProviderLocked(removedProviderName, providerInfo);
                     case IPCTransport::AIDL:
-                        return tryToInitializeAidlProviderLocked(removedAidlProviderName,
+                        return tryToInitializeAidlProviderLocked(removedProviderName,
                                 providerInfo);
                     default:
-                        ALOGE("%s Unsupported Transport %d", __FUNCTION__, providerTransport);
+                        ALOGE("%s Unsupported Transport %d", __FUNCTION__, eToI(providerTransport));
                 }
             }
         }
@@ -2362,7 +2382,7 @@
             }
             break;
         default:
-            ALOGE("%s Invalid transport %d", __FUNCTION__, transport);
+            ALOGE("%s Invalid transport %d", __FUNCTION__, eToI(transport));
             return BAD_VALUE;
     }
 
@@ -2438,7 +2458,7 @@
 
 bool CameraProviderManager::ProviderInfo::isExternalLazyHAL() const {
     std::string providerName = mProviderName;
-    if (flags::lazy_aidl_wait_for_service() && getIPCTransport() == IPCTransport::AIDL) {
+    if (getIPCTransport() == IPCTransport::AIDL) {
         using aidl::android::hardware::camera::provider::ICameraProvider;
         providerName =
                 mProviderName.substr(std::string(ICameraProvider::descriptor).size() + 1);
@@ -2446,6 +2466,20 @@
     return kEnableLazyHal && (providerName == kExternalProviderName);
 }
 
+std::set<pid_t> CameraProviderManager::getProviderPids() {
+    std::set<pid_t> pids;
+
+    if (flags::enable_hal_abort_from_cameraservicewatchdog()) {
+        std::lock_guard<std::mutex> lock(mProviderPidMapLock);
+
+        std::transform(mProviderPidMap.begin(), mProviderPidMap.end(),
+                    std::inserter(pids, pids.begin()),
+                    [](std::pair<const std::string, pid_t>& entry) { return entry.second; });
+    }
+
+    return pids;
+}
+
 status_t CameraProviderManager::ProviderInfo::dump(int fd, const Vector<String16>&) const {
     dprintf(fd, "== Camera Provider HAL %s (v2.%d, %s) static info: %zu devices: ==\n",
             mProviderInstance.c_str(),
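
getProviderPids() snapshots the map values under the lock and returns the set by value, so callers never touch mProviderPidMap without holding mProviderPidMapLock. The std::inserter adaptor is what lets std::transform write into a std::set, which has no push_back. A self-contained illustration of the same idiom:

    #include <algorithm>
    #include <iostream>
    #include <iterator>
    #include <map>
    #include <set>
    #include <string>
    #include <sys/types.h>

    int main() {
        // Same shape as mProviderPidMap: provider instance name -> pid.
        std::map<std::string, pid_t> providerPids = {
            {"internal/0", 1234},
            {"external/0", 5678},
        };

        std::set<pid_t> pids;
        // std::inserter adapts std::set::insert() into an output iterator,
        // which std::transform requires as its destination.
        std::transform(providerPids.begin(), providerPids.end(),
                       std::inserter(pids, pids.begin()),
                       [](const auto& entry) { return entry.second; });

        for (pid_t pid : pids) std::cout << pid << "\n";
    }
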
@@ -2471,8 +2505,9 @@
                 device->hasFlashUnit() ? "true" : "false");
         hardware::CameraInfo info;
         int portraitRotation;
-        status_t res = device->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
-                &info);
+        status_t res = device->getCameraInfo(
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                &portraitRotation, &info);
         if (res != OK) {
             dprintf(fd, "   <Error reading camera info: %s (%d)>\n",
                     strerror(-res), res);
@@ -2483,7 +2518,7 @@
         }
         CameraMetadata info2;
         res = device->getCameraCharacteristics(true /*overrideForPerfClass*/, &info2,
-                /*overrideToPortrait*/false);
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res == INVALID_OPERATION) {
             dprintf(fd, "  API2 not directly supported\n");
         } else if (res != OK) {
@@ -2707,7 +2742,7 @@
         }
         if (!known) {
             ALOGW("Camera provider %s says an unknown camera %s now has torch status %d. Curious.",
-                    mProviderName.c_str(), cameraDeviceName.c_str(), newStatus);
+                mProviderName.c_str(), cameraDeviceName.c_str(), eToI(newStatus));
             return;
         }
         // no lock needed since listener is set up only once during
@@ -2761,10 +2796,15 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraInfo(
-        bool overrideToPortrait, int *portraitRotation,
+        int rotationOverride, int *portraitRotation,
         hardware::CameraInfo *info) const {
     if (info == nullptr) return BAD_VALUE;
 
+    bool freeform_compat_enabled = wm_flags::enable_camera_compat_for_desktop_windowing();
+    if (!freeform_compat_enabled &&
+            rotationOverride > hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
+        ALOGW("Camera compat freeform flag disabled but rotation override is %d", rotationOverride);
+    }
     camera_metadata_ro_entry facing =
             mCameraCharacteristics.find(ANDROID_LENS_FACING);
     if (facing.count == 1) {
@@ -2792,13 +2832,18 @@
         return NAME_NOT_FOUND;
     }
 
-    if (overrideToPortrait && (info->orientation == 0 || info->orientation == 180)) {
+    if (rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT
+            && (info->orientation == 0 || info->orientation == 180)) {
         *portraitRotation = 90;
         if (info->facing == hardware::CAMERA_FACING_FRONT) {
             info->orientation = (360 + info->orientation - 90) % 360;
         } else {
             info->orientation = (360 + info->orientation + 90) % 360;
         }
+    } else if (freeform_compat_enabled &&
+            rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_ROTATION_ONLY
+            && (info->orientation == 90 || info->orientation == 270)) {
+        *portraitRotation = info->facing == hardware::CAMERA_FACING_BACK ? 90 : 270;
     } else {
         *portraitRotation = 0;
     }
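
The hunk above generalizes the old boolean: ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT still rotates sensors mounted at 0/180 degrees and reports a 90 degree portrait rotation, while the new ROTATION_OVERRIDE_ROTATION_ONLY case (gated on the desktop-windowing compat flag) reports 90/270 for sensors mounted at 90/270 without touching the advertised orientation. A compact sketch of that decision table, with locally defined constants standing in for the hardware::ICameraService values and the freeform-compat flag check omitted:

    #include <iostream>

    // Local stand-ins for the ICameraService constants (illustrative only).
    enum RotationOverride { NONE = 0, OVERRIDE_TO_PORTRAIT = 1, ROTATION_ONLY = 2 };
    enum Facing { FACING_BACK = 0, FACING_FRONT = 1 };

    struct Result { int orientation; int portraitRotation; };

    Result applyRotationOverride(int orientation, Facing facing, RotationOverride mode) {
        Result r{orientation, 0};
        if (mode == OVERRIDE_TO_PORTRAIT && (orientation == 0 || orientation == 180)) {
            r.portraitRotation = 90;
            // Front cameras rotate the opposite way to keep the preview upright.
            r.orientation = (facing == FACING_FRONT)
                    ? (360 + orientation - 90) % 360
                    : (360 + orientation + 90) % 360;
        } else if (mode == ROTATION_ONLY && (orientation == 90 || orientation == 270)) {
            r.portraitRotation = (facing == FACING_BACK) ? 90 : 270;
        }
        return r;
    }

    int main() {
        Result r = applyRotationOverride(0, FACING_FRONT, OVERRIDE_TO_PORTRAIT);
        std::cout << "orientation=" << r.orientation
                  << " portraitRotation=" << r.portraitRotation << "\n";  // 270, 90
    }
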
@@ -2828,7 +2873,8 @@
 }
 
 status_t CameraProviderManager::ProviderInfo::DeviceInfo3::getCameraCharacteristics(
-        bool overrideForPerfClass, CameraMetadata *characteristics, bool overrideToPortrait) {
+        bool overrideForPerfClass, CameraMetadata *characteristics,
+        int rotationOverride) {
     if (characteristics == nullptr) return BAD_VALUE;
 
     if (!overrideForPerfClass && mCameraCharNoPCOverride != nullptr) {
@@ -2837,7 +2883,7 @@
         *characteristics = mCameraCharacteristics;
     }
 
-    if (overrideToPortrait) {
+    if (rotationOverride == hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT) {
         const auto &lensFacingEntry = characteristics->find(ANDROID_LENS_FACING);
         const auto &sensorOrientationEntry = characteristics->find(ANDROID_SENSOR_ORIENTATION);
         uint8_t lensFacing = lensFacingEntry.data.u8[0];
@@ -3198,11 +3244,11 @@
 
 status_t CameraProviderManager::getCameraCharacteristicsLocked(const std::string &id,
         bool overrideForPerfClass, CameraMetadata* characteristics,
-        bool overrideToPortrait) const {
+        int rotationOverride) const {
     auto deviceInfo = findDeviceInfoLocked(id);
     if (deviceInfo != nullptr) {
         return deviceInfo->getCameraCharacteristics(overrideForPerfClass, characteristics,
-                overrideToPortrait);
+                rotationOverride);
     }
 
     // Find hidden physical camera characteristics
@@ -3238,8 +3284,9 @@
 
         hardware::CameraInfo info;
         int portraitRotation;
-        status_t res = deviceInfo->getCameraInfo(/*overrideToPortrait*/false, &portraitRotation,
-                &info);
+        status_t res = deviceInfo->getCameraInfo(
+                /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                &portraitRotation, &info);
         if (res != OK) {
             ALOGE("%s: Error reading camera info: %s (%d)", __FUNCTION__, strerror(-res), res);
             continue;
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index 248227d..b686a58 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -292,7 +292,8 @@
      * Return the old camera API camera info
      */
     status_t getCameraInfo(const std::string &id,
-            bool overrideToPortrait, int *portraitRotation, hardware::CameraInfo* info) const;
+            int rotationOverride, int *portraitRotation,
+            hardware::CameraInfo* info) const;
 
     /**
      * Return API2 camera characteristics - returns NAME_NOT_FOUND if a device ID does
@@ -300,7 +301,7 @@
      */
     status_t getCameraCharacteristics(const std::string &id,
             bool overrideForPerfClass, CameraMetadata* characteristics,
-            bool overrideToPortrait) const;
+            int rotationOverride) const;
 
     status_t isConcurrentSessionConfigurationSupported(
             const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>
@@ -331,7 +332,7 @@
      status_t getSessionCharacteristics(const std::string& id,
             const SessionConfiguration &configuration,
             bool overrideForPerfClass,
-            bool overrideToPortrait,
+            int rotationOverride,
             CameraMetadata* sessionCharacteristics /*out*/) const;
 
     /**
@@ -430,6 +431,11 @@
     // LocalRegistrationCallback::onServiceRegistration
     virtual void onServiceRegistration(const String16& name, const sp<IBinder> &binder) override;
 
+    /**
+     * Return the set of pids of the initialized camera provider HAL processes.
+     */
+    std::set<pid_t> getProviderPids();
+
     /**
      * Dump out information about available providers and devices
      */
@@ -626,7 +632,8 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(bool overrideToPortrait,
+            virtual status_t getCameraInfo(
+                    int rotationOverride,
                     int *portraitRotation,
                     hardware::CameraInfo *info) const = 0;
             virtual bool isAPI1Compatible() const = 0;
@@ -634,7 +641,7 @@
             virtual status_t getCameraCharacteristics(
                     [[maybe_unused]] bool overrideForPerfClass,
                     [[maybe_unused]] CameraMetadata *characteristics,
-                    [[maybe_unused]] bool overrideToPortrait) {
+                    [[maybe_unused]] int rotationOverride) {
                 return INVALID_OPERATION;
             }
             virtual status_t getPhysicalCameraCharacteristics(
@@ -705,7 +712,8 @@
             virtual status_t setTorchMode(bool enabled) = 0;
             virtual status_t turnOnTorchWithStrengthLevel(int32_t torchStrength) = 0;
             virtual status_t getTorchStrengthLevel(int32_t *torchStrength) = 0;
-            virtual status_t getCameraInfo(bool overrideToPortrait,
+            virtual status_t getCameraInfo(
+                    int rotationOverride,
                     int *portraitRotation,
                     hardware::CameraInfo *info) const override;
             virtual bool isAPI1Compatible() const override;
@@ -713,7 +721,7 @@
             virtual status_t getCameraCharacteristics(
                     bool overrideForPerfClass,
                     CameraMetadata *characteristics,
-                    bool overrideToPortrait) override;
+                    int rotationOverride) override;
             virtual status_t getPhysicalCameraCharacteristics(const std::string& physicalCameraId,
                     CameraMetadata *characteristics) const override;
             virtual status_t filterSmallJpegSizes() override;
@@ -911,13 +919,16 @@
     // Provider names of AIDL providers with retrieved binders.
     std::set<std::string> mAidlProviderWithBinders;
 
+    std::mutex mProviderPidMapLock;
+    std::map<std::string, pid_t> mProviderPidMap;
+
     static const char* deviceStatusToString(
         const hardware::camera::common::V1_0::CameraDeviceStatus&);
     static const char* torchStatusToString(
         const hardware::camera::common::V1_0::TorchModeStatus&);
 
     status_t getCameraCharacteristicsLocked(const std::string &id, bool overrideForPerfClass,
-            CameraMetadata* characteristics, bool overrideToPortrait) const;
+            CameraMetadata* characteristics, int rotationOverride) const;
     void filterLogicalCameraIdsLocked(std::vector<std::string>& deviceIds) const;
 
     status_t getSystemCameraKindLocked(const std::string& id, SystemCameraKind *kind) const;
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
index 719ff2c..57df314 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.cpp
@@ -417,7 +417,8 @@
 int processDepthPhotoFrame(DepthPhotoInputFrame inputFrame, size_t depthPhotoBufferSize,
         void* depthPhotoBuffer /*out*/, size_t* depthPhotoActualSize /*out*/) {
     if ((inputFrame.mMainJpegBuffer == nullptr) || (inputFrame.mDepthMapBuffer == nullptr) ||
-            (depthPhotoBuffer == nullptr) || (depthPhotoActualSize == nullptr)) {
+            (depthPhotoBuffer == nullptr) || (depthPhotoActualSize == nullptr) ||
+            (inputFrame.mMaxJpegSize < MIN_JPEG_BUFFER_SIZE)) {
         return BAD_VALUE;
     }
 
diff --git a/services/camera/libcameraservice/common/DepthPhotoProcessor.h b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
index 09b6935..9e79fc0 100644
--- a/services/camera/libcameraservice/common/DepthPhotoProcessor.h
+++ b/services/camera/libcameraservice/common/DepthPhotoProcessor.h
@@ -23,6 +23,9 @@
 namespace android {
 namespace camera3 {
 
+// Minimum JPEG buffer size: 256 KB. The blob header is not included.
+constexpr size_t MIN_JPEG_BUFFER_SIZE = 256 * 1024;
+
 enum DepthPhotoOrientation {
     DEPTH_ORIENTATION_0_DEGREES   = 0,
     DEPTH_ORIENTATION_90_DEGREES  = 90,
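
processDepthPhotoFrame() now rejects inputs whose mMaxJpegSize cannot hold a plausible JPEG, using the new MIN_JPEG_BUFFER_SIZE constant (256 KB, blob header excluded). A trivial sketch of the guard with an illustrative input struct:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kMinJpegBufferSize = 256 * 1024;  // mirrors MIN_JPEG_BUFFER_SIZE

    // Illustrative subset of DepthPhotoInputFrame.
    struct InputFrame {
        const uint8_t* mainJpegBuffer = nullptr;
        const uint8_t* depthMapBuffer = nullptr;
        size_t maxJpegSize = 0;
    };

    // Returns false for inputs the processor cannot handle, mirroring the
    // BAD_VALUE early return in processDepthPhotoFrame().
    bool isValidDepthPhotoInput(const InputFrame& frame, const void* outBuffer,
                                const size_t* outSize) {
        return frame.mainJpegBuffer != nullptr && frame.depthMapBuffer != nullptr &&
               outBuffer != nullptr && outSize != nullptr &&
               frame.maxJpegSize >= kMinJpegBufferSize;
    }
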
diff --git a/services/camera/libcameraservice/common/HalConversionsTemplated.h b/services/camera/libcameraservice/common/HalConversionsTemplated.h
index 96a715c..c586062 100644
--- a/services/camera/libcameraservice/common/HalConversionsTemplated.h
+++ b/services/camera/libcameraservice/common/HalConversionsTemplated.h
@@ -19,6 +19,7 @@
 #include "common/CameraProviderManager.h"
 
 #include <device3/Camera3StreamInterface.h>
+#include <utils/Utils.h>
 
 namespace android {
 
@@ -48,7 +49,7 @@
         case HalCameraDeviceStatus::ENUMERATING:
             return CameraDeviceStatus::ENUMERATING;
     }
-    ALOGW("Unexpectedcamera device status code %d", s);
+    ALOGW("Unexpectedcamera device status code %d", eToI(s));
     return CameraDeviceStatus::NOT_PRESENT;
 }
 
@@ -74,7 +75,7 @@
         case HalTorchModeStatus::AVAILABLE_ON:
             return TorchModeStatus::AVAILABLE_ON;
     }
-    ALOGW("Unexpectedcamera torch mode status code %d", s);
+    ALOGW("Unexpectedcamera torch mode status code %d", eToI(s));
     return TorchModeStatus::NOT_AVAILABLE;
 }
 
@@ -88,7 +89,7 @@
         case HalCameraDeviceStatus::ENUMERATING:
             return "ENUMERATING";
     }
-    ALOGW("Unexpected HAL device status code %d", s);
+    ALOGW("Unexpected HAL device status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
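
The log statements in this file now route enum values through eToI() (pulled in via the new utils/Utils.h include) before handing them to a %d conversion, presumably because the scoped enums used by the AIDL/HIDL types do not implicitly convert to int and trip -Wformat when passed through printf-style varargs. A minimal stand-in with the behavior the call sites rely on (the real helper may differ):

    #include <cstdio>
    #include <type_traits>

    // Minimal stand-in for the eToI() helper from utils/Utils.h (the real
    // implementation may differ): cast an enum to its underlying integer type.
    template <typename Enum>
    constexpr std::underlying_type_t<Enum> eToI(Enum e) {
        return static_cast<std::underlying_type_t<Enum>>(e);
    }

    enum class Status : int { OK = 0, INTERNAL_ERROR = 5 };

    int main() {
        // Without the cast, passing a scoped enum to a %d conversion trips
        // -Wformat (and is undefined for printf-style varargs).
        std::printf("Unexpected HAL status code %d\n", eToI(Status::INTERNAL_ERROR));
    }
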
 
diff --git a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
index 41e0cd1..4bfe11d 100644
--- a/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/aidl/AidlProviderInfo.cpp
@@ -109,11 +109,8 @@
         std::shared_ptr<ICameraProvider>& interface, int64_t currentDeviceState) {
 
     using aidl::android::hardware::camera::provider::ICameraProvider;
-    std::string parsedProviderName = mProviderName;
-    if (flags::lazy_aidl_wait_for_service()) {
-        parsedProviderName =
+    std::string parsedProviderName =
                 mProviderName.substr(std::string(ICameraProvider::descriptor).size() + 1);
-    }
 
     status_t res = parseProviderName(parsedProviderName, &mType, &mId);
     if (res != OK) {
@@ -529,13 +526,11 @@
                 __FUNCTION__, strerror(-res), res);
         return;
     }
-    if (flags::camera_manual_flash_strength_control()) {
-        res = fixupManualFlashStrengthControlTags(mCameraCharacteristics);
-        if (OK != res) {
-            ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            return;
-        }
+    res = fixupManualFlashStrengthControlTags(mCameraCharacteristics);
+    if (OK != res) {
+        ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return;
     }
 
     auto stat = addDynamicDepthTags();
@@ -622,12 +617,10 @@
         mHasFlashUnit = false;
     }
 
-    if (flags::feature_combination_query()) {
-        res = addSessionConfigQueryVersionTag();
-        if (OK != res) {
-            ALOGE("%s: Unable to add sessionConfigurationQueryVersion tag: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-        }
+    res = addSessionConfigQueryVersionTag();
+    if (OK != res) {
+        ALOGE("%s: Unable to add sessionConfigurationQueryVersion tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
     }
 
     camera_metadata_entry entry =
@@ -684,13 +677,11 @@
                         __FUNCTION__, strerror(-res), res);
             }
 
-            if (flags::camera_manual_flash_strength_control()) {
-                res = fixupManualFlashStrengthControlTags(mPhysicalCameraCharacteristics[id]);
-                if (OK != res) {
-                    ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
-                            __FUNCTION__, strerror(-res), res);
-                    return;
-                }
+            res = fixupManualFlashStrengthControlTags(mPhysicalCameraCharacteristics[id]);
+            if (OK != res) {
+                ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                return;
             }
         }
     }
@@ -844,11 +835,7 @@
                     mVersion.get_minor());
             return INVALID_OPERATION;
         }
-        if (flags::feature_combination_query()) {
-            ret = interface->isStreamCombinationWithSettingsSupported(streamConfiguration, status);
-        } else {
-            return INVALID_OPERATION;
-        }
+        ret = interface->isStreamCombinationWithSettingsSupported(streamConfiguration, status);
     } else {
         ret = interface->isStreamCombinationSupported(streamConfiguration, status);
     }
@@ -887,10 +874,6 @@
         return res;
     }
 
-    if (!flags::feature_combination_query()) {
-        return INVALID_OPERATION;
-    }
-
     auto err = interface->constructDefaultRequestSettings(id, &request);
     if (!err.isOk()) {
         ALOGE("%s: Transaction error: %s", __FUNCTION__, err.getMessage());
@@ -1001,16 +984,16 @@
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
         res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
-                /*overrideToPortrait*/false);
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res != OK) {
             return res;
         }
         camera3::metadataGetter getMetadata =
                 [this](const std::string &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
-                    mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
-                                                   &physicalDeviceInfo,
-                                                   /*overrideToPortrait*/false);
+                    mManager->getCameraCharacteristicsLocked(
+                            id, overrideForPerfClass, &physicalDeviceInfo,
+                            hardware::ICameraService::ROTATION_OVERRIDE_NONE);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 6eaf41f..6cedb04 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -26,6 +26,7 @@
 #include "device3/ZoomRatioMapper.h"
 #include <utils/SessionConfigurationUtilsHidl.h>
 #include <utils/Trace.h>
+#include <utils/Utils.h>
 
 #include <android/hardware/camera/device/3.7/ICameraDevice.h>
 
@@ -69,7 +70,7 @@
         case Status::INTERNAL_ERROR:
             return INVALID_OPERATION;
     }
-    ALOGW("Unexpected HAL status code %d", s);
+    ALOGW("Unexpected HAL status code %d", eToI(s));
     return INVALID_OPERATION;
 }
 
@@ -111,7 +112,7 @@
         case Status::INTERNAL_ERROR:
             return "INTERNAL_ERROR";
     }
-    ALOGW("Unexpected HAL status code %d", s);
+    ALOGW("Unexpected HAL status code %d", eToI(s));
     return "UNKNOWN_ERROR";
 }
 
@@ -591,7 +592,7 @@
     }
     if (status != Status::OK) {
         ALOGE("%s: Unable to get camera characteristics for device %s: %s (%d)",
-                __FUNCTION__, id.c_str(), statusToString(status), status);
+                __FUNCTION__, id.c_str(), statusToString(status), eToI(status));
         return;
     }
 
@@ -615,13 +616,12 @@
                 __FUNCTION__, strerror(-res), res);
         return;
     }
-    if (flags::camera_manual_flash_strength_control()) {
-        res = fixupManualFlashStrengthControlTags(mCameraCharacteristics);
-        if (OK != res) {
-            ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-            return;
-        }
+
+    res = fixupManualFlashStrengthControlTags(mCameraCharacteristics);
+    if (OK != res) {
+        ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return;
     }
 
     auto stat = addDynamicDepthTags();
@@ -692,12 +692,10 @@
         mHasFlashUnit = false;
     }
 
-    if (flags::feature_combination_query()) {
-        res = addSessionConfigQueryVersionTag();
-        if (OK != res) {
-            ALOGE("%s: Unable to add sessionConfigurationQueryVersion tag: %s (%d)",
-                    __FUNCTION__, strerror(-res), res);
-        }
+    res = addSessionConfigQueryVersionTag();
+    if (OK != res) {
+        ALOGE("%s: Unable to add sessionConfigurationQueryVersion tag: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
     }
 
     camera_metadata_entry entry =
@@ -770,7 +768,7 @@
             if (status != Status::OK) {
                 ALOGE("%s: Unable to get physical camera %s characteristics for device %s: %s (%d)",
                         __FUNCTION__, id.c_str(), mId.c_str(),
-                        statusToString(status), status);
+                        statusToString(status), eToI(status));
                 return;
             }
 
@@ -781,13 +779,11 @@
                         __FUNCTION__, strerror(-res), res);
             }
 
-            if (flags::camera_manual_flash_strength_control()) {
-                res = fixupManualFlashStrengthControlTags(mPhysicalCameraCharacteristics[id]);
-                if (OK != res) {
-                    ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
-                            __FUNCTION__, strerror(-res), res);
-                    return;
-                }
+            res = fixupManualFlashStrengthControlTags(mPhysicalCameraCharacteristics[id]);
+            if (OK != res) {
+                ALOGE("%s: Unable to fix up manual flash strength control tags: %s (%d)",
+                        __FUNCTION__, strerror(-res), res);
+                return;
             }
         }
     }
@@ -928,7 +924,7 @@
                 res = INVALID_OPERATION;
                 break;
             default:
-                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, callStatus);
+                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, eToI(callStatus));
                 res = UNKNOWN_ERROR;
         }
     } else {
@@ -957,7 +953,7 @@
                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                         perfClassPrimaryCameraIds, cameraId, targetSdkVersion);
         res = mManager->getCameraCharacteristicsLocked(cameraId, overrideForPerfClass, &deviceInfo,
-                /*overrideToPortrait*/false);
+                 hardware::ICameraService::ROTATION_OVERRIDE_NONE);
         if (res != OK) {
             return res;
         }
@@ -965,7 +961,7 @@
                 [this](const std::string &id, bool overrideForPerfClass) {
                     CameraMetadata physicalDeviceInfo;
                     mManager->getCameraCharacteristicsLocked(id, overrideForPerfClass,
-                            &physicalDeviceInfo, /*overrideToPortrait*/false);
+                            &physicalDeviceInfo, hardware::ICameraService::ROTATION_OVERRIDE_NONE);
                     return physicalDeviceInfo;
                 };
         std::vector<std::string> physicalCameraIds;
@@ -1076,7 +1072,7 @@
                         break;
                     default:
                         ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
-                                  callStatus);
+                                eToI(callStatus));
                         res = UNKNOWN_ERROR;
                 }
             } else {
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
index c42e51a..65fee7d 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.cpp
@@ -485,7 +485,7 @@
                     streamId, bufferCount);
         }
     }
-    std::string linesStr = std::move(lines.str());
+    std::string linesStr = lines.str();
     write(fd, linesStr.c_str(), linesStr.size());
 }
 
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.h b/services/camera/libcameraservice/device3/Camera3BufferManager.h
index 64aaa230..27fcf96 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.h
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.h
@@ -68,7 +68,7 @@
      * by the consumer end point, the BufferQueueProducer callback onBufferReleased will call
      * returnBufferForStream() to return the free buffer to this buffer manager. If the stream
      * uses buffer manager to manage the stream buffers, it should disable the BufferQueue
-     * allocation via IGraphicBufferProducer::allowAllocation(false).
+     * allocation via Surface::allowAllocation(false).
      *
      * Registering an already registered stream has no effect.
      *
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index f4d8f7f..5721745 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -85,7 +85,7 @@
 
 Camera3Device::Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-        const std::string &id, bool overrideForPerfClass, bool overrideToPortrait,
+        const std::string &id, bool overrideForPerfClass, int rotationOverride,
         bool legacyClient):
         AttributionAndPermissionUtilsEncapsulator(attributionAndPermissionUtils),
         mCameraServiceProxyWrapper(cameraServiceProxyWrapper),
@@ -111,7 +111,7 @@
         mLastTemplateId(-1),
         mNeedFixupMonochromeTags(false),
         mOverrideForPerfClass(overrideForPerfClass),
-        mOverrideToPortrait(overrideToPortrait),
+        mRotationOverride(rotationOverride),
         mRotateAndCropOverride(ANDROID_SCALER_ROTATE_AND_CROP_NONE),
         mComposerOutput(false),
         mAutoframingOverride(ANDROID_CONTROL_AUTOFRAMING_OFF),
@@ -133,7 +133,7 @@
     return mId;
 }
 
-status_t Camera3Device::initializeCommonLocked() {
+status_t Camera3Device::initializeCommonLocked(sp<CameraProviderManager> manager) {
 
     /** Start up status tracker thread */
     mStatusTracker = new StatusTracker(this);
@@ -187,7 +187,7 @@
     /** Start up request queue thread */
     mRequestThread = createNewRequestThread(
             this, mStatusTracker, mInterface, sessionParamKeys,
-            mUseHalBufManager, mSupportCameraMute, mOverrideToPortrait,
+            mUseHalBufManager, mSupportCameraMute, mRotationOverride,
             mSupportZoomOverride);
     res = mRequestThread->run((std::string("C3Dev-") + mId + "-ReqQueue").c_str());
     if (res != OK) {
@@ -251,7 +251,8 @@
     mInjectionMethods = createCamera3DeviceInjectionMethods(this);
 
     /** Start watchdog thread */
-    mCameraServiceWatchdog = new CameraServiceWatchdog(mId, mCameraServiceProxyWrapper);
+    mCameraServiceWatchdog = new CameraServiceWatchdog(
+            manager->getProviderPids(), mId, mCameraServiceProxyWrapper);
     res = mCameraServiceWatchdog->run("CameraServiceWatchdog");
     if (res != OK) {
         SET_ERR_L("Unable to start camera service watchdog thread: %s (%d)",
@@ -612,10 +613,35 @@
 
     {
         lines = "    Last request sent:\n";
+        LatestRequestInfo lastRequestInfo = getLatestRequestInfoLocked();
+        // Print out output and input stream ids
+        if (flags::dumpsys_request_stream_ids()) {
+            if (lastRequestInfo.outputStreamIds.size() != 0) {
+                lines += "      Output Stream Ids:\n";
+                for (const auto &streamId: lastRequestInfo.outputStreamIds) {
+                    lines +=  "         " + std::to_string(streamId) + "\n";
+                }
+            }
+            if (lastRequestInfo.inputStreamId != -1) {
+                lines += "       Input Stream Id: " + std::to_string(lastRequestInfo.inputStreamId)
+                        + "\n";
+            }
+        }
+        // Keeping this write() outside the flagged block makes the flag
+        // easier to remove later.
+        write(fd, lines.c_str(), lines.size());
+        lines = "    Logical request settings:\n";
+        CameraMetadata lastRequestSettings = lastRequestInfo.requestSettings;
         write(fd, lines.c_str(), lines.size());
 
-        CameraMetadata lastRequest = getLatestRequestLocked();
-        lastRequest.dump(fd, /*verbosity*/2, /*indentation*/6);
+        lastRequestSettings.dump(fd, /*verbosity=all info*/2, /*indentation*/6);
+        if (flags::dumpsys_request_stream_ids()) {
+            for (const auto& pair: lastRequestInfo.physicalRequestSettings) {
+                lines = "    Physical request settings for camera id " + pair.first + ":\n";
+                write(fd, lines.c_str(), lines.size());
+                pair.second.dump(fd, /*verbosity=all info*/2, /*indentation*/8);
+            }
+        }
     }
 
     if (dumpTemplates) {
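
The dump path now pulls a LatestRequestInfo aggregate instead of a bare CameraMetadata, so dumpsys can also report the input/output stream ids and the per-physical-camera settings of the last submitted request when dumpsys_request_stream_ids is enabled. The struct itself is defined in Camera3Device.h and not shown in this hunk; a plausible shape, inferred only from the members referenced here and in the RequestThread hunks below:

    #include <cstdint>
    #include <map>
    #include <set>
    #include <string>

    // Illustrative stand-in for CameraMetadata.
    struct Metadata { std::map<uint32_t, int64_t> entries; };

    // Inferred shape of Camera3Device::LatestRequestInfo, based only on the
    // members referenced in the dump and RequestThread hunks.
    struct LatestRequestInfo {
        Metadata requestSettings;                                // logical request settings
        std::map<std::string, Metadata> physicalRequestSettings; // keyed by physical camera id
        std::set<int32_t> outputStreamIds;                       // streams targeted by the request
        int32_t inputStreamId = -1;                              // -1 when there was no input buffer
    };
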
@@ -720,7 +746,7 @@
 
 status_t Camera3Device::convertMetadataListToRequestListLocked(
         const List<const PhysicalCameraSettingsList> &metadataList,
-        const std::list<const SurfaceMap> &surfaceMaps,
+        const std::list<SurfaceMap> &surfaceMaps,
         bool repeating, nsecs_t requestTimeNs,
         RequestList *requestList) {
     if (requestList == NULL) {
@@ -730,7 +756,7 @@
 
     int32_t burstId = 0;
     List<const PhysicalCameraSettingsList>::const_iterator metadataIt = metadataList.begin();
-    std::list<const SurfaceMap>::const_iterator surfaceMapIt = surfaceMaps.begin();
+    std::list<SurfaceMap>::const_iterator surfaceMapIt = surfaceMaps.begin();
     for (; metadataIt != metadataList.end() && surfaceMapIt != surfaceMaps.end();
             ++metadataIt, ++surfaceMapIt) {
         sp<CaptureRequest> newRequest = setUpRequestLocked(*metadataIt, *surfaceMapIt);
@@ -778,14 +804,14 @@
     ATRACE_CALL();
 
     List<const PhysicalCameraSettingsList> requestsList;
-    std::list<const SurfaceMap> surfaceMaps;
+    std::list<SurfaceMap> surfaceMaps;
     convertToRequestList(requestsList, surfaceMaps, request);
 
     return captureList(requestsList, surfaceMaps, lastFrameNumber);
 }
 
 void Camera3Device::convertToRequestList(List<const PhysicalCameraSettingsList>& requestsList,
-        std::list<const SurfaceMap>& surfaceMaps,
+        std::list<SurfaceMap>& surfaceMaps,
         const CameraMetadata& request) {
     PhysicalCameraSettingsList requestList;
     requestList.push_back({getId(), request});
@@ -803,7 +829,7 @@
 
 status_t Camera3Device::submitRequestsHelper(
         const List<const PhysicalCameraSettingsList> &requests,
-        const std::list<const SurfaceMap> &surfaceMaps,
+        const std::list<SurfaceMap> &surfaceMaps,
         bool repeating,
         /*out*/
         int64_t *lastFrameNumber) {
@@ -851,7 +877,7 @@
 }
 
 status_t Camera3Device::captureList(const List<const PhysicalCameraSettingsList> &requestsList,
-                                    const std::list<const SurfaceMap> &surfaceMaps,
+                                    const std::list<SurfaceMap> &surfaceMaps,
                                     int64_t *lastFrameNumber) {
     ATRACE_CALL();
 
@@ -863,7 +889,7 @@
     ATRACE_CALL();
 
     List<const PhysicalCameraSettingsList> requestsList;
-    std::list<const SurfaceMap> surfaceMaps;
+    std::list<SurfaceMap> surfaceMaps;
     convertToRequestList(requestsList, surfaceMaps, request);
 
     return setStreamingRequestList(requestsList, /*surfaceMap*/surfaceMaps,
@@ -872,7 +898,7 @@
 
 status_t Camera3Device::setStreamingRequestList(
         const List<const PhysicalCameraSettingsList> &requestsList,
-        const std::list<const SurfaceMap> &surfaceMaps, int64_t *lastFrameNumber) {
+        const std::list<SurfaceMap> &surfaceMaps, int64_t *lastFrameNumber) {
     ATRACE_CALL();
 
     return submitRequestsHelper(requestsList, surfaceMaps, /*repeating*/true, lastFrameNumber);
@@ -1398,7 +1424,7 @@
                 request->mRotateAndCropAuto = false;
             }
 
-            overrideAutoRotateAndCrop(request, mOverrideToPortrait, mRotateAndCropOverride);
+            overrideAutoRotateAndCrop(request, mRotationOverride, mRotateAndCropOverride);
         }
 
         if (autoframingSessionKey) {
@@ -1415,6 +1441,21 @@
     return configureStreamsLocked(operatingMode, filteredParams);
 }
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+status_t Camera3Device::getInputSurface(sp<Surface> *surface) {
+    ATRACE_CALL();
+    Mutex::Autolock il(mInterfaceLock);
+    Mutex::Autolock l(mLock);
+
+    if (surface == NULL) {
+        return BAD_VALUE;
+    } else if (mInputStream == NULL) {
+        return INVALID_OPERATION;
+    }
+
+    return mInputStream->getInputSurface(surface);
+}
+#else
 status_t Camera3Device::getInputBufferProducer(
         sp<IGraphicBufferProducer> *producer) {
     ATRACE_CALL();
@@ -1429,6 +1470,7 @@
 
     return mInputStream->getInputBufferProducer(producer);
 }
+#endif
 
 status_t Camera3Device::createDefaultRequest(camera_request_template_t templateId,
         CameraMetadata *request) {
@@ -1606,7 +1648,7 @@
     bool signalPipelineDrain = false;
     if (!active &&
             (mUseHalBufManager ||
-                    (flags::session_hal_buf_manager() && mHalBufManagedStreamIds.size() != 0))) {
+                    (mHalBufManagedStreamIds.size() != 0))) {
         auto streamIds = mOutputStreams.getStreamIds();
         if (mStatus == STATUS_ACTIVE) {
             mRequestThread->signalPipelineDrain(streamIds);
@@ -1810,10 +1852,7 @@
         mSessionStatsBuilder.stopCounter();
     }
 
-    // Calculate expected duration for flush with additional buffer time in ms for watchdog
-    uint64_t maxExpectedDuration = ns2ms(getExpectedInFlightDuration() + kBaseGetBufferWait);
-    status_t res = mCameraServiceWatchdog->WATCH_CUSTOM_TIMER(mRequestThread->flush(),
-            maxExpectedDuration / kCycleLengthMs, kCycleLengthMs);
+    status_t res = mCameraServiceWatchdog->WATCH(mRequestThread->flush());
 
     return res;
 }
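
flush() is now wrapped in the watchdog's default WATCH() cycle instead of a custom timer derived from the expected in-flight duration. The WATCH macros are not part of this diff; purely as a generic illustration of the idea (run a blocking HAL call while a monitor enforces a deadline), here is a small standard-library sketch, not the actual CameraServiceWatchdog:

    #include <chrono>
    #include <condition_variable>
    #include <functional>
    #include <iostream>
    #include <mutex>
    #include <thread>

    // Runs fn() while a monitor thread waits for it to finish; if the deadline
    // passes first, onTimeout() is invoked (the real watchdog would go further
    // and act on the offending provider process).
    template <typename Fn>
    auto watch(Fn fn, std::chrono::milliseconds deadline, std::function<void()> onTimeout) {
        std::mutex m;
        std::condition_variable cv;
        bool done = false;

        std::thread monitor([&] {
            std::unique_lock<std::mutex> lock(m);
            if (!cv.wait_for(lock, deadline, [&] { return done; })) {
                onTimeout();
            }
        });

        auto result = fn();
        {
            std::lock_guard<std::mutex> lock(m);
            done = true;
        }
        cv.notify_one();
        monitor.join();
        return result;
    }

    int main() {
        int status = watch([] { return 0; /* e.g. a blocking HAL call */ },
                           std::chrono::milliseconds(100),
                           [] { std::cerr << "HAL call timed out\n"; });
        std::cout << "call returned " << status << "\n";
    }
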
@@ -2378,46 +2417,15 @@
     return ret;
 }
 
-Camera3Device::RunThreadWithRealtimePriority::RunThreadWithRealtimePriority(int tid) : mTid(tid),
-        mPreviousPolicy(sched_getscheduler(tid)) {
-    if (flags::surface_ipc()) {
-        auto res = sched_getparam(mTid, &mPreviousParams);
-        if (res != OK) {
-            ALOGE("Can't retrieve thread scheduler parameters: %s (%d)",
-                    strerror(-res), res);
-            return;
-        }
-
-        struct sched_param param = {0};
-        param.sched_priority = kRequestThreadPriority;
-
-        res = sched_setscheduler(mTid, SCHED_FIFO, &param);
-        if (res != OK) {
-            ALOGW("Can't set realtime priority for thread: %s (%d)",
-                    strerror(-res), res);
-        } else {
-            ALOGD("Set real time priority for thread (tid %d)", mTid);
-            mPolicyBumped = true;
-        }
-    }
-}
-
-Camera3Device::RunThreadWithRealtimePriority::~RunThreadWithRealtimePriority() {
-    if (mPolicyBumped && flags::surface_ipc()) {
-        auto res = sched_setscheduler(mTid, mPreviousPolicy, &mPreviousParams);
-        if (res != OK) {
-            ALOGE("Can't set regular priority for thread: %s (%d)",
-                    strerror(-res), res);
-        } else {
-            ALOGD("Set regular priority for thread (tid %d)", mTid);
-        }
-    }
-}
 
 status_t Camera3Device::configureStreamsLocked(int operatingMode,
         const CameraMetadata& sessionParams, bool notifyRequestThread) {
     ATRACE_CALL();
     status_t res;
+    // Stream/surface setup can include a lot of binder IPC. Raise the
+    // thread priority when running the binder IPC heavy configuration
+    // sequence.
+    RunThreadWithRealtimePriority priorityBump;
 
     if (mStatus != STATUS_UNCONFIGURED && mStatus != STATUS_CONFIGURED) {
         CLOGE("Not idle");
@@ -2590,70 +2598,61 @@
     // It is possible that use hal buffer manager behavior was changed by the
     // configureStreams call.
     mUseHalBufManager = config.use_hal_buf_manager;
-    if (flags::session_hal_buf_manager()) {
-        bool prevSessionHalBufManager = (mHalBufManagedStreamIds.size() != 0);
-        // It is possible that configureStreams() changed config.hal_buffer_managed_streams
-        mHalBufManagedStreamIds = config.hal_buffer_managed_streams;
+    bool prevSessionHalBufManager = (mHalBufManagedStreamIds.size() != 0);
+    // It is possible that configureStreams() changed config.hal_buffer_managed_streams
+    mHalBufManagedStreamIds = config.hal_buffer_managed_streams;
 
-        bool thisSessionHalBufManager = mHalBufManagedStreamIds.size() != 0;
+    bool thisSessionHalBufManager = mHalBufManagedStreamIds.size() != 0;
 
-        if (prevSessionHalBufManager && !thisSessionHalBufManager) {
-            mRequestBufferSM.deInit();
-        } else if (!prevSessionHalBufManager && thisSessionHalBufManager) {
-            res = mRequestBufferSM.initialize(mStatusTracker);
-            if (res != OK) {
-                SET_ERR_L("%s: Camera %s: RequestBuffer State machine couldn't be initialized!",
-                          __FUNCTION__, mId.c_str());
-                return res;
-            }
+    if (prevSessionHalBufManager && !thisSessionHalBufManager) {
+        mRequestBufferSM.deInit();
+    } else if (!prevSessionHalBufManager && thisSessionHalBufManager) {
+        res = mRequestBufferSM.initialize(mStatusTracker);
+        if (res != OK) {
+            SET_ERR_L("%s: Camera %s: RequestBuffer State machine couldn't be initialized!",
+                        __FUNCTION__, mId.c_str());
+            return res;
         }
-        mRequestThread->setHalBufferManagedStreams(mHalBufManagedStreamIds);
+    }
+    mRequestThread->setHalBufferManagedStreams(mHalBufManagedStreamIds);
+
+    // Finish all stream configuration immediately.
+    // TODO: Try to relax this later back to lazy completion, which should be
+    // faster
+
+    if (mInputStream != NULL && mInputStream->isConfiguring()) {
+        bool streamReConfigured = false;
+        res = mInputStream->finishConfiguration(&streamReConfigured);
+        if (res != OK) {
+            CLOGE("Can't finish configuring input stream %d: %s (%d)", mInputStream->getId(),
+                  strerror(-res), res);
+            cancelStreamsConfigurationLocked();
+            if ((res == NO_INIT || res == DEAD_OBJECT) && mInputStream->isAbandoned()) {
+                return DEAD_OBJECT;
+            }
+            return BAD_VALUE;
+        }
+        if (streamReConfigured) {
+            mInterface->onStreamReConfigured(mInputStream->getId());
+        }
     }
 
-    {
-        // Stream/surface setup can include a lot of binder IPC. Raise the
-        // thread priority when running the binder IPC heavy configuration
-        // sequence.
-        RunThreadWithRealtimePriority priorityBump;
-
-        // Finish all stream configuration immediately.
-        // TODO: Try to relax this later back to lazy completion, which should be
-        // faster
-
-        if (mInputStream != NULL && mInputStream->isConfiguring()) {
+    for (size_t i = 0; i < mOutputStreams.size(); i++) {
+        sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
+        if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
             bool streamReConfigured = false;
-            res = mInputStream->finishConfiguration(&streamReConfigured);
+            res = outputStream->finishConfiguration(&streamReConfigured);
             if (res != OK) {
-                CLOGE("Can't finish configuring input stream %d: %s (%d)",
-                        mInputStream->getId(), strerror(-res), res);
+                CLOGE("Can't finish configuring output stream %d: %s (%d)", outputStream->getId(),
+                      strerror(-res), res);
                 cancelStreamsConfigurationLocked();
-                if ((res == NO_INIT || res == DEAD_OBJECT) && mInputStream->isAbandoned()) {
+                if ((res == NO_INIT || res == DEAD_OBJECT) && outputStream->isAbandoned()) {
                     return DEAD_OBJECT;
                 }
                 return BAD_VALUE;
             }
             if (streamReConfigured) {
-                mInterface->onStreamReConfigured(mInputStream->getId());
-            }
-        }
-
-        for (size_t i = 0; i < mOutputStreams.size(); i++) {
-            sp<Camera3OutputStreamInterface> outputStream = mOutputStreams[i];
-            if (outputStream->isConfiguring() && !outputStream->isConsumerConfigurationDeferred()) {
-                bool streamReConfigured = false;
-                res = outputStream->finishConfiguration(&streamReConfigured);
-                if (res != OK) {
-                    CLOGE("Can't finish configuring output stream %d: %s (%d)",
-                            outputStream->getId(), strerror(-res), res);
-                    cancelStreamsConfigurationLocked();
-                    if ((res == NO_INIT || res == DEAD_OBJECT) && outputStream->isAbandoned()) {
-                        return DEAD_OBJECT;
-                    }
-                    return BAD_VALUE;
-                }
-                if (streamReConfigured) {
-                    mInterface->onStreamReConfigured(outputStream->getId());
-                }
+                mInterface->onStreamReConfigured(outputStream->getId());
             }
         }
     }
@@ -2673,8 +2672,8 @@
     if (disableFifo != 1) {
         // Boost priority of request thread to SCHED_FIFO.
         pid_t requestThreadTid = mRequestThread->getTid();
-        res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), requestThreadTid,
-                kRequestThreadPriority);
+        res = SchedulingPolicyUtils::requestPriorityDirect(
+                getpid(), requestThreadTid, RunThreadWithRealtimePriority::kRequestThreadPriority);
         if (res != OK) {
             ALOGW("Can't set realtime priority for request processing thread: %s (%d)",
                     strerror(-res), res);
@@ -2949,13 +2948,13 @@
     camera3::flushInflightRequests(states);
 }
 
-CameraMetadata Camera3Device::getLatestRequestLocked() {
+Camera3Device::LatestRequestInfo Camera3Device::getLatestRequestInfoLocked() {
     ALOGV("%s", __FUNCTION__);
 
-    CameraMetadata retVal;
+    LatestRequestInfo retVal;
 
     if (mRequestThread != NULL) {
-        retVal = mRequestThread->getLatestRequest();
+        retVal = mRequestThread->getLatestRequestInfo();
     }
 
     return retVal;
@@ -3020,8 +3019,7 @@
 }
 
 bool Camera3Device::HalInterface::isHalBufferManagedStream(int32_t streamId) const {
-    return (mUseHalBufManager || (flags::session_hal_buf_manager() &&
-                                  contains(mHalBufManagedStreamIds, streamId)));
+    return (mUseHalBufManager || contains(mHalBufManagedStreamIds, streamId));
 }
 
 status_t Camera3Device::HalInterface::popInflightBuffer(
@@ -3079,7 +3077,7 @@
         sp<HalInterface> interface, const Vector<int32_t>& sessionParamKeys,
         bool useHalBufManager,
         bool supportCameraMute,
-        bool overrideToPortrait,
+        int rotationOverride,
         bool supportSettingsOverride) :
         Thread(/*canCallJava*/false),
         mParent(parent),
@@ -3113,7 +3111,7 @@
         mLatestSessionParams(sessionParamKeys.size()),
         mUseHalBufManager(useHalBufManager),
         mSupportCameraMute(supportCameraMute),
-        mOverrideToPortrait(overrideToPortrait),
+        mRotationOverride(rotationOverride),
         mSupportSettingsOverride(supportSettingsOverride) {
     mStatusId = statusTracker->addComponent("RequestThread");
     mVndkVersion = getVNDKVersionFromProp(__ANDROID_API_FUTURE__);
@@ -3525,30 +3523,40 @@
     if (halRequest.settings != nullptr) { // Don't update if they were unchanged
         Mutex::Autolock al(mLatestRequestMutex);
 
-        camera_metadata_t* cloned = clone_camera_metadata(halRequest.settings);
-        mLatestRequest.acquire(cloned);
+        // Fill in latest request and physical request
+        camera_metadata_t *cloned = clone_camera_metadata(halRequest.settings);
+        mLatestRequestInfo.requestSettings.acquire(cloned);
 
-        mLatestPhysicalRequest.clear();
+        mLatestRequestInfo.physicalRequestSettings.clear();
+        mLatestRequestInfo.outputStreamIds.clear();
         for (uint32_t i = 0; i < halRequest.num_physcam_settings; i++) {
             cloned = clone_camera_metadata(halRequest.physcam_settings[i]);
-            mLatestPhysicalRequest.emplace(halRequest.physcam_id[i],
-                    CameraMetadata(cloned));
+            mLatestRequestInfo.physicalRequestSettings.emplace(halRequest.physcam_id[i],
+                                           CameraMetadata(cloned));
         }
 
         if (parent != nullptr) {
             int32_t inputStreamId = -1;
             if (halRequest.input_buffer != nullptr) {
               inputStreamId = Camera3Stream::cast(halRequest.input_buffer->stream)->getId();
+              mLatestRequestInfo.inputStreamId = inputStreamId;
             }
 
+           for (size_t i = 0; i < halRequest.num_output_buffers; i++) {
+               int32_t outputStreamId =
+                       Camera3Stream::cast(halRequest.output_buffers[i].stream)->getId();
+               mLatestRequestInfo.outputStreamIds.emplace(outputStreamId);
+           }
+
             parent->monitorMetadata(TagMonitor::REQUEST,
                     halRequest.frame_number,
-                    0, mLatestRequest, mLatestPhysicalRequest, halRequest.output_buffers,
+                    0, mLatestRequestInfo.requestSettings,
+                    mLatestRequestInfo.physicalRequestSettings, halRequest.output_buffers,
                     halRequest.num_output_buffers, inputStreamId);
         }
     }
     if (parent != nullptr) {
-        parent->collectRequestStats(halRequest.frame_number, mLatestRequest);
+        parent->collectRequestStats(halRequest.frame_number, mLatestRequestInfo.requestSettings);
     }
 
     if (halRequest.settings != nullptr) {
@@ -3651,10 +3659,12 @@
         sp<CaptureRequest> captureRequest = nextRequest.captureRequest;
         captureRequest->mTestPatternChanged = overrideTestPattern(captureRequest);
         // Do not override rotate&crop for stream configurations that include
-        // SurfaceViews(HW_COMPOSER) output, unless mOverrideToPortrait is set.
+        // SurfaceViews(HW_COMPOSER) output, unless mRotationOverride is set.
         // The display rotation there will be compensated by NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY
-        captureRequest->mRotateAndCropChanged = (mComposerOutput && !mOverrideToPortrait) ? false :
-            overrideAutoRotateAndCrop(captureRequest);
+        using hardware::ICameraService::ROTATION_OVERRIDE_NONE;
+        captureRequest->mRotateAndCropChanged =
+                (mComposerOutput && (mRotationOverride == ROTATION_OVERRIDE_NONE)) ?
+                        false : overrideAutoRotateAndCrop(captureRequest);
         captureRequest->mAutoframingChanged = overrideAutoframing(captureRequest);
         if (flags::inject_session_params()) {
             injectSessionParams(captureRequest, mInjectedSessionParams);
@@ -3706,19 +3716,18 @@
         cleanUpFailedRequests(/*sendRequestError*/ true);
         // Check if any stream is abandoned.
         checkAndStopRepeatingRequest();
+        // Inform waitUntilRequestProcessed thread of a failed request ID
+        wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
         return true;
     } else if (res != OK) {
         cleanUpFailedRequests(/*sendRequestError*/ false);
+        // Inform waitUntilRequestProcessed thread of a failed request ID
+        wakeupLatestRequest(/*failedRequestId*/true, latestRequestId);
         return false;
     }
 
     // Inform waitUntilRequestProcessed thread of a new request ID
-    {
-        Mutex::Autolock al(mLatestRequestMutex);
-
-        mLatestRequestId = latestRequestId;
-        mLatestRequestSignal.signal();
-    }
+    wakeupLatestRequest(/*failedRequestId*/false, latestRequestId);
 
     // Submit a batch of requests to HAL.
     // Use flush lock only when submitting multilple requests in a batch.
@@ -4172,8 +4181,7 @@
             }
         }
         bool passSurfaceMap =
-                mUseHalBufManager ||
-                        (flags::session_hal_buf_manager() && containsHalBufferManagedStream);
+                mUseHalBufManager || containsHalBufferManagedStream;
         auto expectedDurationInfo = calculateExpectedDurationRange(settings);
         res = parent->registerInFlight(halRequest->frame_number,
                 totalNumBuffers, captureRequest->mResultExtras,
@@ -4207,13 +4215,13 @@
     return OK;
 }
 
-CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
+Camera3Device::LatestRequestInfo Camera3Device::RequestThread::getLatestRequestInfo() const {
     ATRACE_CALL();
     Mutex::Autolock al(mLatestRequestMutex);
 
     ALOGV("RequestThread::%s", __FUNCTION__);
 
-    return mLatestRequest;
+    return mLatestRequestInfo;
 }
 
 bool Camera3Device::RequestThread::isStreamPending(
@@ -4289,7 +4297,7 @@
 
 void Camera3Device::RequestThread::signalPipelineDrain(const std::vector<int>& streamIds) {
     if (!mUseHalBufManager &&
-            (flags::session_hal_buf_manager() && mHalBufManagedStreamIds.size() == 0)) {
+            (mHalBufManagedStreamIds.size() == 0)) {
         ALOGE("%s called for camera device not supporting HAL buffer management", __FUNCTION__);
         return;
     }
@@ -4447,8 +4455,7 @@
             Camera3Stream *stream = Camera3Stream::cast((*outputBuffers)[i].stream);
             int32_t streamId = stream->getId();
             bool skipBufferForStream =
-                    mUseHalBufManager || (flags::session_hal_buf_manager() &&
-                            contains(mHalBufManagedStreamIds, streamId));
+                    mUseHalBufManager || (contains(mHalBufManagedStreamIds, streamId));
             if (skipBufferForStream) {
                 // No output buffer can be returned when using HAL buffer manager for its stream
                 continue;
@@ -4473,12 +4480,7 @@
                         hardware::camera2::ICameraDeviceCallbacks::ERROR_CAMERA_REQUEST,
                         captureRequest->mResultExtras);
             }
-            {
-                Mutex::Autolock al(mLatestRequestMutex);
-
-                mLatestFailedRequestId = captureRequest->mResultExtras.requestId;
-                mLatestRequestSignal.signal();
-            }
+            wakeupLatestRequest(/*failedRequestId*/true, captureRequest->mResultExtras.requestId);
         }
 
         // Remove yet-to-be submitted inflight request from inflightMap
@@ -4930,16 +4932,16 @@
 bool Camera3Device::RequestThread::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request) {
     ATRACE_CALL();
     Mutex::Autolock l(mTriggerMutex);
-    return Camera3Device::overrideAutoRotateAndCrop(request, this->mOverrideToPortrait,
+    return Camera3Device::overrideAutoRotateAndCrop(request, this->mRotationOverride,
             this->mRotateAndCropOverride);
 }
 
 bool Camera3Device::overrideAutoRotateAndCrop(const sp<CaptureRequest> &request,
-        bool overrideToPortrait,
+        int rotationOverride,
         camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride) {
     ATRACE_CALL();
 
-    if (overrideToPortrait) {
+    if (rotationOverride != hardware::ICameraService::ROTATION_OVERRIDE_NONE) {
         uint8_t rotateAndCrop_u8 = rotateAndCropOverride;
         CameraMetadata &metadata = request->mSettingsList.begin()->metadata;
         metadata.update(ANDROID_SCALER_ROTATE_AND_CROP,
@@ -5140,6 +5142,20 @@
     return OK;
 }
 
+void Camera3Device::RequestThread::wakeupLatestRequest(
+        bool latestRequestFailed,
+        int32_t latestRequestId) {
+    Mutex::Autolock al(mLatestRequestMutex);
+
+    if (latestRequestFailed) {
+        mLatestFailedRequestId = latestRequestId;
+    } else {
+        mLatestRequestId = latestRequestId;
+    }
+    mLatestRequestSignal.signal();
+}
+
 /**
  * PreparerThread inner class methods
  */
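
For context on the helper added above: wakeupLatestRequest() centralizes the record-and-signal
sequence that was previously inlined at both the success and failure paths, so any thread blocked
in waitUntilRequestProcessed is woken either way. Below is a minimal standalone sketch of that
producer/waiter contract using std:: primitives rather than android::Mutex/Condition; all names
are illustrative, not the AOSP ones, and request ids are assumed to be monotonically increasing.

    #include <condition_variable>
    #include <cstdint>
    #include <mutex>

    class LatestRequestTracker {
      public:
        // Request thread: record the id that was just submitted (or that failed
        // to submit) and wake any waiters.
        void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId) {
            std::lock_guard<std::mutex> lock(mMutex);
            if (latestRequestFailed) {
                mLatestFailedRequestId = latestRequestId;
            } else {
                mLatestRequestId = latestRequestId;
            }
            mSignal.notify_all();
        }

        // Waiter: block until the given request id has either been submitted or
        // has failed (ids assumed monotonically increasing).
        void waitUntilRequestProcessed(int32_t requestId) {
            std::unique_lock<std::mutex> lock(mMutex);
            mSignal.wait(lock, [&] {
                return mLatestRequestId >= requestId || mLatestFailedRequestId >= requestId;
            });
        }

      private:
        std::mutex mMutex;
        std::condition_variable mSignal;
        int32_t mLatestRequestId = -1;
        int32_t mLatestFailedRequestId = -1;
    };
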
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index d646886..3c45c1a 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -31,11 +31,13 @@
 #include <utils/Timers.h>
 
 #include <camera/CaptureResult.h>
+#include <gui/Flags.h>
 
 #include "CameraServiceWatchdog.h"
 #include <aidl/android/hardware/camera/device/CameraBlob.h>
 
 #include "common/CameraDeviceBase.h"
+#include "common/DepthPhotoProcessor.h"
 #include "device3/BufferUtils.h"
 #include "device3/StatusTracker.h"
 #include "device3/Camera3BufferManager.h"
@@ -88,7 +90,7 @@
 
     explicit Camera3Device(std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-            const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+            const std::string& id, bool overrideForPerfClass, int rotationOverride,
             bool legacyClient = false);
 
     virtual ~Camera3Device();
@@ -132,12 +134,12 @@
     // idle state
     status_t capture(CameraMetadata &request, int64_t *lastFrameNumber = NULL) override;
     status_t captureList(const List<const PhysicalCameraSettingsList> &requestsList,
-            const std::list<const SurfaceMap> &surfaceMaps,
+            const std::list<SurfaceMap> &surfaceMaps,
             int64_t *lastFrameNumber = NULL) override;
     status_t setStreamingRequest(const CameraMetadata &request,
             int64_t *lastFrameNumber = NULL) override;
     status_t setStreamingRequestList(const List<const PhysicalCameraSettingsList> &requestsList,
-            const std::list<const SurfaceMap> &surfaceMaps,
+            const std::list<SurfaceMap> &surfaceMaps,
             int64_t *lastFrameNumber = NULL) override;
     status_t clearStreamingRequest(int64_t *lastFrameNumber = NULL) override;
 
@@ -196,8 +198,12 @@
     status_t configureStreams(const CameraMetadata& sessionParams,
             int operatingMode =
             camera_stream_configuration_mode_t::CAMERA_STREAM_CONFIGURATION_NORMAL_MODE) override;
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    status_t getInputSurface(sp<Surface> *surface) override;
+#else
     status_t getInputBufferProducer(
             sp<IGraphicBufferProducer> *producer) override;
+#endif
 
     void getOfflineStreamIds(std::vector<int> *offlineStreamIds) override;
 
@@ -373,13 +379,11 @@
     static const size_t        kInFlightWarnLimitHighSpeed = 256; // batch size 32 * pipe depth 8
     static const nsecs_t       kMinInflightDuration = 5000000000; // 5 s
     static const nsecs_t       kBaseGetBufferWait = 3000000000; // 3 sec.
-    // SCHED_FIFO priority for request submission thread in HFR mode
-    static const int           kRequestThreadPriority = 1;
 
     struct                     RequestTrigger;
     // minimal jpeg buffer size: 256KB + blob header
-    static const ssize_t       kMinJpegBufferSize =
-            256 * 1024 + sizeof(aidl::android::hardware::camera::device::CameraBlob);
+    static const ssize_t       kMinJpegBufferSize = camera3::MIN_JPEG_BUFFER_SIZE +
+            sizeof(aidl::android::hardware::camera::device::CameraBlob);
     // Constant to use for stream ID when one doesn't exist
     static const int           NO_STREAM = -1;
 
@@ -705,17 +709,17 @@
 
     status_t convertMetadataListToRequestListLocked(
             const List<const PhysicalCameraSettingsList> &metadataList,
-            const std::list<const SurfaceMap> &surfaceMaps,
+            const std::list<SurfaceMap> &surfaceMaps,
             bool repeating, nsecs_t requestTimeNs,
             /*out*/
             RequestList *requestList);
 
     void convertToRequestList(List<const PhysicalCameraSettingsList>& requestsList,
-            std::list<const SurfaceMap>& surfaceMaps,
+            std::list<SurfaceMap>& surfaceMaps,
             const CameraMetadata& request);
 
     status_t submitRequestsHelper(const List<const PhysicalCameraSettingsList> &requestsList,
-                                  const std::list<const SurfaceMap> &surfaceMaps,
+                                  const std::list<SurfaceMap> &surfaceMaps,
                                   bool repeating,
                                   int64_t *lastFrameNumber = NULL);
 
@@ -727,7 +731,7 @@
      *
      * Must be called with mLock and mInterfaceLock held.
      */
-    status_t initializeCommonLocked();
+    status_t initializeCommonLocked(sp<CameraProviderManager> manager);
 
     /**
      * Update capture request list so that each batch size honors the batch_size_max report from
@@ -738,12 +742,19 @@
     virtual void applyMaxBatchSizeLocked(
             RequestList* requestList, const sp<camera3::Camera3OutputStreamInterface>& stream) = 0;
 
+    struct LatestRequestInfo {
+        CameraMetadata requestSettings;
+        std::unordered_map<std::string, CameraMetadata> physicalRequestSettings;
+        int32_t inputStreamId = -1;
+        std::set<int32_t> outputStreamIds;
+    };
+
     /**
      * Get the last request submitted to the hal by the request thread.
      *
      * Must be called with mLock held.
      */
-    virtual CameraMetadata getLatestRequestLocked();
+    virtual LatestRequestInfo getLatestRequestInfoLocked();
 
     virtual status_t injectionCameraInitialize(const std::string &injectCamId,
             sp<CameraProviderManager> manager) = 0;
@@ -879,7 +890,7 @@
 
     // Override rotate_and_crop control if needed
     static bool    overrideAutoRotateAndCrop(const sp<CaptureRequest> &request /*out*/,
-            bool overrideToPortrait,
+            int rotationOverride,
             camera_metadata_enum_android_scaler_rotate_and_crop_t rotateAndCropOverride);
 
     // Override auto framing control if needed
@@ -916,7 +927,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride);
         ~RequestThread();
 
@@ -994,7 +1005,7 @@
          * Get the latest request that was sent to the HAL
          * with process_capture_request.
          */
-        CameraMetadata getLatestRequest() const;
+        LatestRequestInfo getLatestRequestInfo() const;
 
         /**
          * Returns true if the stream is a target of any queued or repeating
@@ -1038,6 +1049,11 @@
             const sp<CaptureRequest> &request,
             const CameraMetadata& injectedSessionParams);
 
+        /**
+         * Record the latest (or latest failed) request id and signal mLatestRequestSignal.
+         */
+        void wakeupLatestRequest(bool latestRequestFailed, int32_t latestRequestId);
+
       protected:
 
         virtual bool threadLoop();
@@ -1189,8 +1205,7 @@
         // android.request.id for latest process_capture_request
         int32_t            mLatestRequestId;
         int32_t            mLatestFailedRequestId;
-        CameraMetadata     mLatestRequest;
-        std::unordered_map<std::string, CameraMetadata> mLatestPhysicalRequest;
+        LatestRequestInfo mLatestRequestInfo;
 
         typedef KeyedVector<uint32_t/*tag*/, RequestTrigger> TriggerMap;
         Mutex              mTriggerMutex;
@@ -1226,7 +1241,7 @@
         bool               mUseHalBufManager = false;
         std::set<int32_t > mHalBufManagedStreamIds;
         const bool         mSupportCameraMute;
-        const bool         mOverrideToPortrait;
+        const bool         mRotationOverride;
         const bool         mSupportSettingsOverride;
         int32_t            mVndkVersion = -1;
     };
@@ -1237,7 +1252,7 @@
                 const Vector<int32_t>& /*sessionParamKeys*/,
                 bool /*useHalBufManager*/,
                 bool /*supportCameraMute*/,
-                bool /*overrideToPortrait*/,
+                int /*rotationOverride*/,
                 bool /*supportSettingsOverride*/) = 0;
 
     sp<RequestThread> mRequestThread;
@@ -1519,7 +1534,7 @@
 
     // Whether the camera framework overrides the device characteristics for
     // app compatibility reasons.
-    bool mOverrideToPortrait;
+    int mRotationOverride;
     camera_metadata_enum_android_scaler_rotate_and_crop_t mRotateAndCropOverride;
     bool mComposerOutput;
 
@@ -1630,34 +1645,6 @@
 
     void overrideStreamUseCaseLocked();
 
-    // An instance of this class will raise the scheduling policy of a given
-    // given thread to real time and keep it this way throughout the lifetime
-    // of the object. The thread scheduling policy will revert back to its original
-    // state after the instances is released. By default the implementation will
-    // raise the priority of the current thread unless clients explicitly specify
-    // another thread id.
-    // Client must avoid:
-    //  - Keeping an instance of this class for extended and long running operations.
-    //    This is only intended for short/temporarily priority bumps that mitigate
-    //    scheduling delays within critical camera paths.
-    //  - Allocating instances of this class on the memory heap unless clients have
-    //    complete control over the object lifetime. It is preferable to allocate
-    //    instances of this class on the stack instead.
-    //  - Nesting multiple instances of this class using the same default or same thread id.
-    class RunThreadWithRealtimePriority final {
-        public:
-            RunThreadWithRealtimePriority(int tid = gettid());
-            ~RunThreadWithRealtimePriority();
-
-            RunThreadWithRealtimePriority(const RunThreadWithRealtimePriority&) = delete;
-            RunThreadWithRealtimePriority& operator=(const RunThreadWithRealtimePriority&) = delete;
-
-        private:
-            int mTid;
-            int mPreviousPolicy;
-            bool mPolicyBumped = false;
-            struct sched_param mPreviousParams;
-    };
 
 }; // class Camera3Device
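
The LatestRequestInfo struct introduced in this header bundles what getLatestRequest() used to
expose piecemeal: the logical settings, any per-physical-camera settings, and the stream topology
of the last request handed to the HAL. A hedged sketch of a hypothetical consumer follows; it is
not part of this change and assumes the declarations above are accessible from the calling code.

    // Hypothetical helper, for illustration only.
    void logLatestRequestInfo(const Camera3Device::LatestRequestInfo& info) {
        // Logical settings of the last request submitted via process_capture_request.
        ALOGV("latest request: %zu metadata entries", info.requestSettings.entryCount());

        // Per-physical-camera overrides, keyed by physical camera id.
        for (const auto& [physicalId, physicalSettings] : info.physicalRequestSettings) {
            ALOGV("  physical camera %s: %zu entries",
                    physicalId.c_str(), physicalSettings.entryCount());
        }

        // Stream topology: inputStreamId stays -1 when the request had no input stream.
        ALOGV("  input stream %d, %zu output streams",
                info.inputStreamId, info.outputStreamIds.size());
    }
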
 
diff --git a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
index 22d2716..61c5a3b 100644
--- a/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
+++ b/services/camera/libcameraservice/device3/Camera3IOStreamBase.cpp
@@ -104,7 +104,7 @@
     lines << fmt::sprintf("      Total buffers: %zu, currently dequeued: %zu, "
             "currently cached: %zu\n", mTotalBufferCount, mHandoutTotalBufferCount,
             mCachedOutputBufferCount);
-    std::string linesStr = std::move(lines.str());
+    std::string linesStr = lines.str();
     write(fd, linesStr.c_str(), linesStr.size());
 
     Camera3Stream::dump(fd, args);
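
The std::move() dropped above was a pessimizing move: std::ostringstream::str() already returns a
temporary, so wrapping it in std::move() only defeats copy elision and draws clang's
-Wpessimizing-move warning. A minimal self-contained illustration (not taken from the patch):

    #include <sstream>
    #include <string>

    std::string buildDump() {
        std::ostringstream lines;
        lines << "Total buffers: " << 8 << "\n";
        // Before: std::string linesStr = std::move(lines.str());
        //   clang: "moving a temporary object prevents copy elision" (-Wpessimizing-move)
        // After: the temporary returned by str() is elided or moved from directly.
        std::string linesStr = lines.str();
        return linesStr;
    }
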
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 283322e..999f563 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -18,10 +18,12 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <camera/StringUtils.h>
+
 #include "Camera3InputStream.h"
 
 namespace android {
@@ -180,6 +182,21 @@
                                  /*output*/false, /*transform*/ -1);
 }
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+status_t Camera3InputStream::getInputSurfaceLocked(sp<Surface> *surface) {
+    ATRACE_CALL();
+
+    if (surface == NULL) {
+        return BAD_VALUE;
+    } else if (mSurface == NULL) {
+        ALOGE("%s: No input stream is configured", __FUNCTION__);
+        return INVALID_OPERATION;
+    }
+
+    *surface = mSurface;
+    return OK;
+}
+#else
 status_t Camera3InputStream::getInputBufferProducerLocked(
             sp<IGraphicBufferProducer> *producer) {
     ATRACE_CALL();
@@ -194,6 +211,7 @@
     *producer = mProducer;
     return OK;
 }
+#endif
 
 status_t Camera3InputStream::disconnectLocked() {
 
@@ -239,9 +257,15 @@
     mLastTimestamp = 0;
 
     if (mConsumer.get() == 0) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<BufferItemConsumer> bufferItemConsumer = new BufferItemConsumer(mUsage);
+        sp<IGraphicBufferProducer> producer =
+                bufferItemConsumer->getSurface()->getIGraphicBufferProducer();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         int minUndequeuedBuffers = 0;
         res = producer->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
@@ -271,11 +295,24 @@
             camera_stream::max_buffers : minBufs;
         // TODO: somehow set the total buffer count when producer connects?
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mConsumer = bufferItemConsumer;
+        mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
+        mConsumer->setMaxAcquiredBufferCount(mTotalBufferCount);
+
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+        mSurface = mConsumer->getSurface();
+#else
+        mProducer = mConsumer->getSurface()->getIGraphicBufferProducer();
+#endif // WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+
+#else
         mConsumer = new BufferItemConsumer(consumer, mUsage,
                                            mTotalBufferCount);
         mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
 
         mProducer = producer;
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         mConsumer->setBufferFreedListener(this);
     }
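
The hunk above moves Camera3InputStream to the libgui model in which BufferItemConsumer creates
and owns its BufferQueue, so the producer end is reached through the consumer's Surface rather
than through an explicitly created IGraphicBufferProducer. A rough sketch of the two wiring
styles, restricted to the calls that appear in the hunk (the struct and function names below are
hypothetical):

    #include <com_android_graphics_libgui_flags.h>
    #include <gui/BufferItemConsumer.h>
    #include <gui/BufferQueue.h>
    #include <gui/Surface.h>

    using namespace android;

    struct InputQueueEnds {
        sp<BufferItemConsumer> consumer;  // service-side end
        sp<Surface> surface;              // producer end handed out to clients
    };

    InputQueueEnds makeInputQueue(uint64_t usage, int maxAcquiredBuffers) {
        InputQueueEnds ends;
    #if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        // New path: the consumer owns the queue; the producer side is only
        // reachable through the consumer's Surface.
        ends.consumer = sp<BufferItemConsumer>::make(usage);
        ends.consumer->setMaxAcquiredBufferCount(maxAcquiredBuffers);
        ends.surface = ends.consumer->getSurface();
    #else
        // Old path: create the BufferQueue explicitly and hand each end to its owner.
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> bqConsumer;
        BufferQueue::createBufferQueue(&producer, &bqConsumer);
        ends.consumer = new BufferItemConsumer(bqConsumer, usage, maxAcquiredBuffers);
        ends.surface = new Surface(producer);
    #endif
        return ends;
    }
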
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.h b/services/camera/libcameraservice/device3/Camera3InputStream.h
index a99c364..b1603e5 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.h
@@ -18,6 +18,7 @@
 #define ANDROID_SERVERS_CAMERA3_INPUT_STREAM_H
 
 #include <utils/RefBase.h>
+#include <gui/Flags.h>
 #include <gui/Surface.h>
 #include <gui/BufferItemConsumer.h>
 
@@ -50,7 +51,11 @@
   private:
 
     sp<BufferItemConsumer> mConsumer;
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    sp<Surface> mSurface;
+#else
     sp<IGraphicBufferProducer> mProducer;
+#endif
     Vector<BufferItem> mBuffersInFlight;
 
     static const std::string FAKE_ID;
@@ -75,8 +80,12 @@
     virtual status_t getInputBufferLocked(camera_stream_buffer *buffer, Size *size);
     virtual status_t returnInputBufferLocked(
             const camera_stream_buffer &buffer);
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    virtual status_t getInputSurfaceLocked(sp<Surface> *surface);
+#else
     virtual status_t getInputBufferProducerLocked(
             sp<IGraphicBufferProducer> *producer);
+#endif
     virtual status_t disconnectLocked();
 
     virtual status_t configureQueueLocked();
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3cd4543..83c8a38 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -555,8 +555,8 @@
     // Configure consumer-side ANativeWindow interface. The listener may be used
     // to notify buffer manager (if it is used) of the returned buffers.
     res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
-            /*reportBufferRemoval*/true,
-            /*listener*/mBufferProducerListener);
+            /*listener*/mBufferProducerListener,
+            /*reportBufferRemoval*/true);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mId);
@@ -687,11 +687,7 @@
         }
     }
 
-    if (flags::surface_ipc()) {
-        res = mConsumer->setMaxDequeuedBufferCount(mTotalBufferCount - maxConsumerBuffers);
-    } else {
-        res = native_window_set_buffer_count(mConsumer.get(), mTotalBufferCount);
-    }
+    res = mConsumer->setMaxDequeuedBufferCount(mTotalBufferCount - maxConsumerBuffers);
     if (res != OK) {
         ALOGE("%s: Unable to set buffer count for stream %d",
                 __FUNCTION__, mId);
@@ -732,7 +728,11 @@
         if (res == OK) {
             // Disable buffer allocation for this BufferQueue, buffer manager will take over
             // the buffer allocation responsibility.
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_PLATFORM_API_IMPROVEMENTS)
+            mConsumer->allowAllocation(false);
+#else
             mConsumer->getIGraphicBufferProducer()->allowAllocation(false);
+#endif
             mUseBufferManager = true;
         } else {
             ALOGE("%s: Unable to register stream %d to camera3 buffer manager, "
@@ -1031,7 +1031,7 @@
 status_t Camera3OutputStream::getEndpointUsageForSurface(uint64_t *usage,
         const sp<Surface>& surface) {
     bool internalConsumer = (mConsumer.get() != nullptr) && (mConsumer == surface);
-    if (mConsumerUsageCachedValue.has_value() && flags::surface_ipc() && internalConsumer) {
+    if (mConsumerUsageCachedValue.has_value() && internalConsumer) {
         *usage = mConsumerUsageCachedValue.value();
         return OK;
     }
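
With the surface_ipc flag removed above, the producer side of the stream is always sized with
Surface::setMaxDequeuedBufferCount(), which takes only the camera's share of the queue; the
consumer's acquirable buffers are configured separately. A small sketch of the accounting (the
function name is hypothetical):

    // totalBufferCount covers both ends of the queue; the producer reserves only
    // what is left after the consumer's acquirable buffers are set aside.
    int maxDequeuedBufferCount(int totalBufferCount, int maxConsumerBuffers) {
        // Old: native_window_set_buffer_count(window, totalBufferCount);
        // New: surface->setMaxDequeuedBufferCount(totalBufferCount - maxConsumerBuffers);
        return totalBufferCount - maxConsumerBuffers;
    }
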
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 8a93ed8..f8b78c1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -193,6 +193,7 @@
             virtual void onBufferReleased();
             virtual bool needsReleaseNotify() { return mNeedsReleaseNotify; }
             virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& buffers);
+            virtual void onBufferDetached(int /*slot*/) override {};
 
         private:
             wp<Camera3OutputStream> mParent;
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
index 89e08a1..62226e1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.cpp
@@ -36,6 +36,7 @@
 #include <utils/SortedVector.h>
 #include <utils/Trace.h>
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 
 #include <android/hardware/camera/device/3.4/ICameraDeviceCallback.h>
@@ -675,8 +676,9 @@
                     states.listener->notifyPhysicalCameraChange(physicalId);
                 }
                 states.activePhysicalId = physicalId;
-
-                if (!states.legacyClient && !states.overrideToPortrait) {
+                using hardware::ICameraService::ROTATION_OVERRIDE_NONE;
+                if (!states.legacyClient &&
+                        states.rotationOverride == ROTATION_OVERRIDE_NONE) {
                     auto deviceInfo = states.physicalDeviceInfoMap.find(physicalId);
                     if (deviceInfo != states.physicalDeviceInfoMap.end()) {
                         auto orientation = deviceInfo->second.find(ANDROID_SENSOR_ORIENTATION);
@@ -892,8 +894,7 @@
 
         if (outputBuffers[i].buffer == nullptr) {
             if (!useHalBufManager &&
-                    !(flags::session_hal_buf_manager() &&
-                            contains(halBufferManagedStreams, streamId))) {
+                    !contains(halBufferManagedStreams, streamId)) {
                 // With the HAL buffer management API, the HAL sometimes has to return buffers
                 // that do not yet have an output buffer handle filled in. This is, however,
                 // illegal if the HAL buffer management API is not being used.
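
The overrideToPortrait-to-rotationOverride migration seen in this file (and in Camera3Device
above) widens a boolean into an ICameraService rotation-override value, so any value other than
ROTATION_OVERRIDE_NONE now takes the override path. A hedged sketch of the check; only
ROTATION_OVERRIDE_NONE is taken from the patch, and the helper name is illustrative:

    #include <android/hardware/ICameraService.h>

    // Previously: if (overrideToPortrait) { ... }
    bool rotationOverrideActive(int rotationOverride) {
        return rotationOverride !=
                android::hardware::ICameraService::ROTATION_OVERRIDE_NONE;
    }
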
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtils.h b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
index 75864d7..21965f5 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtils.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtils.h
@@ -154,7 +154,7 @@
         bool legacyClient;
         nsecs_t& minFrameDuration;
         bool& isFixedFps;
-        bool overrideToPortrait;
+        int rotationOverride;
         std::string &activePhysicalId;
     };
 
diff --git a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
index aca7a67..2d75d03 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputUtilsTemplated.h
@@ -212,8 +212,7 @@
         bool noBufferReturned = false;
         buffer_handle_t *buffer = nullptr;
         if (states.useHalBufManager ||
-                (flags::session_hal_buf_manager() &&
-                        contains(states.halBufManagedStreamIds, bSrc.streamId))) {
+                contains(states.halBufManagedStreamIds, bSrc.streamId)) {
             // This is suspicious most of the time but can be correct during flush where HAL
             // has to return capture result before a buffer is requested
             if (bSrc.bufferId == BUFFER_ID_NO_BUFFER) {
@@ -303,8 +302,7 @@
 
     for (const auto& buf : buffers) {
         if (!states.useHalBufManager &&
-            !(flags::session_hal_buf_manager() &&
-             contains(states.halBufManagedStreamIds, buf.streamId))) {
+            !contains(states.halBufManagedStreamIds, buf.streamId)) {
             ALOGE("%s: Camera %s does not support HAL buffer management for stream id %d",
                   __FUNCTION__, states.cameraId.c_str(), buf.streamId);
             return;
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 485f3f0..187bd93 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -18,6 +18,8 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include "Flags.h"
+
 #include "Camera3SharedOutputStream.h"
 
 namespace android {
@@ -59,7 +61,11 @@
 status_t Camera3SharedOutputStream::connectStreamSplitterLocked() {
     status_t res = OK;
 
-    mStreamSplitter = new Camera3StreamSplitter(mUseHalBufManager);
+#if USE_NEW_STREAM_SPLITTER
+    mStreamSplitter = sp<Camera3StreamSplitter>::make(mUseHalBufManager);
+#else
+    mStreamSplitter = sp<DeprecatedCamera3StreamSplitter>::make(mUseHalBufManager);
+#endif  // USE_NEW_STREAM_SPLITTER
 
     uint64_t usage = 0;
     getEndpointUsage(&usage);
@@ -90,7 +96,11 @@
     // Attach the buffer to the splitter output queues. This could block if
     // the output queue doesn't have any empty slot. So unlock during the course
     // of attachBufferToOutputs.
+#if USE_NEW_STREAM_SPLITTER
     sp<Camera3StreamSplitter> splitter = mStreamSplitter;
+#else
+    sp<DeprecatedCamera3StreamSplitter> splitter = mStreamSplitter;
+#endif  // USE_NEW_STREAM_SPLITTER
     mLock.unlock();
     res = splitter->attachBufferToOutputs(anb, surface_ids);
     mLock.lock();
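
Camera3SharedOutputStream now selects between the new and deprecated splitter at compile time via
USE_NEW_STREAM_SPLITTER, repeating the #if at each use site. An equivalent way to express the
choice once would be a type alias; this is a hypothetical alternative, not what the patch does,
and it assumes both splitter classes live in namespace android as the new one does:

    #include "Flags.h"
    #if USE_NEW_STREAM_SPLITTER
    #include "Camera3StreamSplitter.h"
    #else
    #include "deprecated/DeprecatedCamera3StreamSplitter.h"
    #endif  // USE_NEW_STREAM_SPLITTER

    namespace android {
    #if USE_NEW_STREAM_SPLITTER
    using StreamSplitterType = Camera3StreamSplitter;
    #else
    using StreamSplitterType = DeprecatedCamera3StreamSplitter;
    #endif  // USE_NEW_STREAM_SPLITTER
    }  // namespace android

    // Usage: sp<StreamSplitterType> splitter = sp<StreamSplitterType>::make(mUseHalBufManager);
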
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
index 818ce17..ae11507 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.h
@@ -18,9 +18,17 @@
 #define ANDROID_SERVERS_CAMERA3_SHARED_OUTPUT_STREAM_H
 
 #include <array>
-#include "Camera3StreamSplitter.h"
+
+#include "Flags.h"
+
 #include "Camera3OutputStream.h"
 
+#if USE_NEW_STREAM_SPLITTER
+#include "Camera3StreamSplitter.h"
+#else
+#include "deprecated/DeprecatedCamera3StreamSplitter.h"
+#endif  // USE_NEW_STREAM_SPLITTER
+
 namespace android {
 
 namespace camera3 {
@@ -106,8 +114,11 @@
      * The Camera3StreamSplitter object this stream uses for stream
      * sharing.
      */
+#if USE_NEW_STREAM_SPLITTER
     sp<Camera3StreamSplitter> mStreamSplitter;
-
+#else
+    sp<DeprecatedCamera3StreamSplitter> mStreamSplitter;
+#endif  // USE_NEW_STREAM_SPLITTER
     /**
      * Initialize stream splitter.
      */
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 4934203..ae76e60 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -29,6 +29,9 @@
 #include "ui/GraphicBufferMapper.h"
 
 #include <cutils/properties.h>
+#include <com_android_internal_camera_flags.h>
+
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 
@@ -388,6 +391,10 @@
             mOldDataSpace == camera_stream::data_space &&
             mOldFormat == camera_stream::format) {
         mState = STATE_CONFIGURED;
+        if (flags::enable_stream_reconfiguration_for_unchanged_streams()
+                && streamReconfigured != nullptr) {
+            *streamReconfigured = true;
+        }
         return OK;
     }
 
@@ -864,12 +871,21 @@
     return res;
 }
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+status_t Camera3Stream::getInputSurface(sp<Surface> *surface) {
+    ATRACE_CALL();
+    Mutex::Autolock l(mLock);
+
+    return getInputSurfaceLocked(surface);
+}
+#else
 status_t Camera3Stream::getInputBufferProducer(sp<IGraphicBufferProducer> *producer) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
 
     return getInputBufferProducerLocked(producer);
 }
+#endif
 
 void Camera3Stream::fireBufferRequestForFrameNumber(uint64_t frameNumber,
         const CameraMetadata& settings) {
@@ -983,10 +999,17 @@
     ALOGE("%s: This type of stream does not support input", __FUNCTION__);
     return INVALID_OPERATION;
 }
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+status_t Camera3Stream::getInputSurfaceLocked(sp<Surface>*) {
+    ALOGE("%s: This type of stream does not support input", __FUNCTION__);
+    return INVALID_OPERATION;
+}
+#else
 status_t Camera3Stream::getInputBufferProducerLocked(sp<IGraphicBufferProducer>*) {
     ALOGE("%s: This type of stream does not support input", __FUNCTION__);
     return INVALID_OPERATION;
 }
+#endif
 
 void Camera3Stream::addBufferListener(
         wp<Camera3StreamBufferListener> listener) {
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index ccd1044..1519ada 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA3_STREAM_H
 #define ANDROID_SERVERS_CAMERA3_STREAM_H
 
+#include <gui/Flags.h>
 #include <gui/Surface.h>
 #include <utils/RefBase.h>
 #include <utils/String16.h>
@@ -382,9 +383,13 @@
      */
     status_t         returnInputBuffer(const camera_stream_buffer &buffer);
 
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    status_t         getInputSurface(sp<Surface> *producer);
+#else
     // get the buffer producer of the input buffer queue.
     // only apply to input streams.
     status_t         getInputBufferProducer(sp<IGraphicBufferProducer> *producer);
+#endif
 
     /**
      * Whether any of the stream's buffers are currently in use by the HAL,
@@ -534,8 +539,12 @@
     virtual status_t returnInputBufferLocked(
             const camera_stream_buffer &buffer);
     virtual bool     hasOutstandingBuffersLocked() const = 0;
+#if WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
+    virtual status_t getInputSurfaceLocked(sp<Surface> *surface);
+#else
     // Get the buffer producer of the input buffer queue. Only apply to input streams.
     virtual status_t getInputBufferProducerLocked(sp<IGraphicBufferProducer> *producer);
+#endif
 
     // Can return -ENOTCONN when we are already disconnected (not an error)
     virtual status_t disconnectLocked() = 0;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 4df8193..0786622 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H
 #define ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H
 
+#include <gui/Flags.h>
 #include <utils/RefBase.h>
 
 #include <camera/camera2/OutputConfiguration.h>
@@ -435,12 +436,14 @@
      */
     virtual status_t returnInputBuffer(const camera_stream_buffer &buffer) = 0;
 
+#if !WB_CAMERA3_AND_PROCESSORS_WITH_DEPENDENCIES
     /**
      * Get the buffer producer of the input buffer queue.
      *
      * This method only applies to input streams.
      */
     virtual status_t getInputBufferProducer(sp<IGraphicBufferProducer> *producer) = 0;
+#endif
 
     /**
      * Whether any of the stream's buffers are currently in use by the HAL,
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 255b4f2..7090545 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -14,31 +14,39 @@
  * limitations under the License.
  */
 
-#include <inttypes.h>
-
 #define LOG_TAG "Camera3StreamSplitter"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <binder/ProcessState.h>
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
+#include <gui/BufferItemConsumer.h>
+#include <gui/BufferQueue.h>
 #include <gui/IGraphicBufferConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
-#include <gui/BufferQueue.h>
 #include <gui/Surface.h>
-#include <camera/StringUtils.h>
-
+#include <system/window.h>
 #include <ui/GraphicBuffer.h>
-
-#include <binder/ProcessState.h>
-
 #include <utils/Trace.h>
 
 #include <cutils/atomic.h>
+#include <inttypes.h>
+#include <algorithm>
+#include <cstdint>
+#include <memory>
 
 #include "Camera3Stream.h"
+#include "Flags.h"
 
 #include "Camera3StreamSplitter.h"
 
+// We're relying on a large number of yet-to-be-fully-launched flag dependencies
+// here. So instead of flagging each one, we flag the entire implementation to
+// improve legibility.
+#if USE_NEW_STREAM_SPLITTER
+
 namespace android {
 
 status_t Camera3StreamSplitter::connect(const std::unordered_map<size_t, sp<Surface>> &surfaces,
@@ -54,7 +62,7 @@
     Mutex::Autolock lock(mMutex);
     status_t res = OK;
 
-    if (mOutputs.size() > 0 || mConsumer != nullptr) {
+    if (mOutputSurfaces.size() > 0 || mBufferItemConsumer != nullptr) {
         SP_LOGE("%s: already connected", __FUNCTION__);
         return BAD_VALUE;
     }
@@ -81,33 +89,43 @@
         }
     }
 
-    // Create BufferQueue for input
-    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
-
     // Allocate 1 extra buffer to handle the case where all buffers are detached
     // from input, and attached to the outputs. In this case, the input queue's
     // dequeueBuffer can still allocate 1 extra buffer before being blocked by
     // the output's attachBuffer().
     mMaxConsumerBuffers++;
-    mBufferItemConsumer = new BufferItemConsumer(mConsumer, consumerUsage, mMaxConsumerBuffers);
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mBufferItemConsumer = sp<BufferItemConsumer>::make(consumerUsage, mMaxConsumerBuffers);
+    mSurface = mBufferItemConsumer->getSurface();
+#else
+    // Create BufferQueue for input
+    sp<IGraphicBufferProducer> bqProducer;
+    sp<IGraphicBufferConsumer> bqConsumer;
+    BufferQueue::createBufferQueue(&bqProducer, &bqConsumer);
+
+    mBufferItemConsumer = new BufferItemConsumer(bqConsumer, consumerUsage, mMaxConsumerBuffers);
+    mSurface = new Surface(bqProducer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
     if (mBufferItemConsumer == nullptr) {
         return NO_MEMORY;
     }
-    mConsumer->setConsumerName(toString8(mConsumerName));
+    mBufferItemConsumer->setName(toString8(mConsumerName));
 
-    *consumer = new Surface(mProducer);
+    *consumer = mSurface;
     if (*consumer == nullptr) {
         return NO_MEMORY;
     }
 
-    res = mProducer->setAsyncMode(true);
+    res = mSurface->setAsyncMode(true);
     if (res != OK) {
         SP_LOGE("%s: Failed to enable input queue async mode: %s(%d)", __FUNCTION__,
                 strerror(-res), res);
         return res;
     }
 
-    res = mConsumer->consumerConnect(this, /* controlledByApp */ false);
+    mBufferItemConsumer->setFrameAvailableListener(this);
 
     mWidth = width;
     mHeight = height;
@@ -128,25 +146,19 @@
     ATRACE_CALL();
     Mutex::Autolock lock(mMutex);
 
-    for (auto& notifier : mNotifiers) {
-        sp<IGraphicBufferProducer> producer = notifier.first;
-        sp<OutputListener> listener = notifier.second;
-        IInterface::asBinder(producer)->unlinkToDeath(listener);
-    }
     mNotifiers.clear();
 
-    for (auto& output : mOutputs) {
+    for (auto& output : mOutputSurfaces) {
         if (output.second != nullptr) {
             output.second->disconnect(NATIVE_WINDOW_API_CAMERA);
         }
     }
-    mOutputs.clear();
     mOutputSurfaces.clear();
-    mOutputSlots.clear();
+    mHeldBuffers.clear();
     mConsumerBufferCount.clear();
 
-    if (mConsumer.get() != nullptr) {
-        mConsumer->consumerDisconnect();
+    if (mBufferItemConsumer != nullptr) {
+        mBufferItemConsumer->abandon();
     }
 
     if (mBuffers.size() > 0) {
@@ -178,7 +190,7 @@
     }
 
     if (mMaxConsumerBuffers > mAcquiredInputBuffers) {
-        res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
+        res = mBufferItemConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
     }
 
     return res;
@@ -196,7 +208,7 @@
         return BAD_VALUE;
     }
 
-    if (mOutputs[surfaceId] != nullptr) {
+    if (mOutputSurfaces[surfaceId] != nullptr) {
         SP_LOGE("%s: surfaceId: %u already taken!", __FUNCTION__, (unsigned) surfaceId);
         return BAD_VALUE;
     }
@@ -215,11 +227,9 @@
         return res;
     }
 
-    sp<IGraphicBufferProducer> gbp = outputQueue->getIGraphicBufferProducer();
     // Connect to the buffer producer
-    sp<OutputListener> listener(new OutputListener(this, gbp));
-    IInterface::asBinder(gbp)->linkToDeath(listener);
-    res = outputQueue->connect(NATIVE_WINDOW_API_CAMERA, listener);
+    sp<OutputListener> listener = sp<OutputListener>::make(this, outputQueue);
+    res = outputQueue->connect(NATIVE_WINDOW_API_CAMERA, listener, /* reportBufferRemoval */ false);
     if (res != NO_ERROR) {
         SP_LOGE("addOutput: failed to connect (%d)", res);
         return res;
@@ -261,22 +271,21 @@
         outputQueue->setDequeueTimeout(timeout);
     }
 
-    res = gbp->allowAllocation(false);
+    res = outputQueue->allowAllocation(false);
     if (res != OK) {
         SP_LOGE("%s: Failed to turn off allocation for outputQueue", __FUNCTION__);
         return res;
     }
 
     // Add new entry into mOutputs
-    mOutputs[surfaceId] = gbp;
     mOutputSurfaces[surfaceId] = outputQueue;
     mConsumerBufferCount[surfaceId] = maxConsumerBuffers;
     if (mConsumerBufferCount[surfaceId] > mMaxHalBuffers) {
         SP_LOGW("%s: Consumer buffer count %zu larger than max. Hal buffers: %zu", __FUNCTION__,
                 mConsumerBufferCount[surfaceId], mMaxHalBuffers);
     }
-    mNotifiers[gbp] = listener;
-    mOutputSlots[gbp] = std::make_unique<OutputSlots>(totalBufferCount);
+    mNotifiers[outputQueue] = listener;
+    mHeldBuffers[outputQueue] = std::make_unique<HeldBuffers>(totalBufferCount);
 
     mMaxConsumerBuffers += maxConsumerBuffers;
     return NO_ERROR;
@@ -293,7 +302,7 @@
     }
 
     if (mAcquiredInputBuffers < mMaxConsumerBuffers) {
-        res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
+        res = mBufferItemConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
         if (res != OK) {
             SP_LOGE("%s: setMaxAcquiredBufferCount failed %d", __FUNCTION__, res);
             return res;
@@ -304,70 +313,54 @@
 }
 
 status_t Camera3StreamSplitter::removeOutputLocked(size_t surfaceId) {
-    if (mOutputs[surfaceId] == nullptr) {
+    if (mOutputSurfaces[surfaceId] == nullptr) {
         SP_LOGE("%s: output surface is not present!", __FUNCTION__);
         return BAD_VALUE;
     }
 
-    sp<IGraphicBufferProducer> gbp = mOutputs[surfaceId];
+    sp<Surface> surface = mOutputSurfaces[surfaceId];
     //Search and decrement the ref. count of any buffers that are
     //still attached to the removed surface.
     std::vector<uint64_t> pendingBufferIds;
-    auto& outputSlots = *mOutputSlots[gbp];
-    for (size_t i = 0; i < outputSlots.size(); i++) {
-        if (outputSlots[i] != nullptr) {
-            pendingBufferIds.push_back(outputSlots[i]->getId());
-            auto rc = gbp->detachBuffer(i);
-            if (rc != NO_ERROR) {
-                //Buffers that fail to detach here will be scheduled for detach in the
-                //input buffer queue and the rest of the registered outputs instead.
-                //This will help ensure that camera stops accessing buffers that still
-                //can get referenced by the disconnected output.
-                mDetachedBuffers.emplace(outputSlots[i]->getId());
-            }
+
+    // TODO: can we simplify this to just use the tracker?
+    for (const auto& buffer : (*mHeldBuffers[surface])) {
+        pendingBufferIds.push_back(buffer->getId());
+        auto rc = surface->detachBuffer(buffer);
+        if (rc != NO_ERROR) {
+            // Buffers that fail to detach here will be scheduled for detach in the
+            // input buffer queue and the rest of the registered outputs instead.
+            // This helps ensure that the camera stops accessing buffers that can
+            // still be referenced by the disconnected output.
+            mDetachedBuffers.emplace(buffer->getId());
         }
     }
-    mOutputs[surfaceId] = nullptr;
     mOutputSurfaces[surfaceId] = nullptr;
-    mOutputSlots[gbp] = nullptr;
+    mHeldBuffers[surface] = nullptr;
     for (const auto &id : pendingBufferIds) {
         decrementBufRefCountLocked(id, surfaceId);
     }
 
-    auto res = IInterface::asBinder(gbp)->unlinkToDeath(mNotifiers[gbp]);
-    if (res != OK) {
-        SP_LOGE("%s: Failed to unlink producer death listener: %d ", __FUNCTION__, res);
-        return res;
-    }
-
-    res = gbp->disconnect(NATIVE_WINDOW_API_CAMERA);
+    status_t res = surface->disconnect(NATIVE_WINDOW_API_CAMERA);
     if (res != OK) {
         SP_LOGE("%s: Unable to disconnect from producer interface: %d ", __FUNCTION__, res);
         return res;
     }
 
-    mNotifiers[gbp] = nullptr;
+    mNotifiers[surface] = nullptr;
     mMaxConsumerBuffers -= mConsumerBufferCount[surfaceId];
     mConsumerBufferCount[surfaceId] = 0;
 
     return res;
 }
 
-status_t Camera3StreamSplitter::outputBufferLocked(const sp<IGraphicBufferProducer>& output,
+status_t Camera3StreamSplitter::outputBufferLocked(const sp<Surface>& output,
         const BufferItem& bufferItem, size_t surfaceId) {
     ATRACE_CALL();
     status_t res;
-    IGraphicBufferProducer::QueueBufferInput queueInput(
-            bufferItem.mTimestamp, bufferItem.mIsAutoTimestamp,
-            bufferItem.mDataSpace, bufferItem.mCrop,
-            static_cast<int32_t>(bufferItem.mScalingMode),
-            bufferItem.mTransform, bufferItem.mFence);
-
-    IGraphicBufferProducer::QueueBufferOutput queueOutput;
 
     uint64_t bufferId = bufferItem.mGraphicBuffer->getId();
     const BufferTracker& tracker = *(mBuffers[bufferId]);
-    int slot = getSlotForOutputLocked(output, tracker.getBuffer());
 
     if (mOutputSurfaces[surfaceId] != nullptr) {
         sp<ANativeWindow> anw = mOutputSurfaces[surfaceId];
@@ -377,19 +370,26 @@
         SP_LOGE("%s: Invalid surface id: %zu!", __FUNCTION__, surfaceId);
     }
 
+    output->setBuffersTimestamp(bufferItem.mTimestamp);
+    output->setBuffersDataSpace(static_cast<ui::Dataspace>(bufferItem.mDataSpace));
+    output->setCrop(&bufferItem.mCrop);
+    output->setScalingMode(bufferItem.mScalingMode);
+    output->setBuffersTransform(bufferItem.mTransform);
+
     // In case the output BufferQueue has its own lock, if we hold splitter lock while calling
     // queueBuffer (which will try to acquire the output lock), the output could be holding its
     // own lock calling releaseBuffer (which will try to acquire the splitter lock), running into
     // circular lock situation.
     mMutex.unlock();
-    res = output->queueBuffer(slot, queueInput, &queueOutput);
+    SurfaceQueueBufferOutput queueBufferOutput;
+    res = output->queueBuffer(bufferItem.mGraphicBuffer, bufferItem.mFence, &queueBufferOutput);
     mMutex.lock();
 
-    SP_LOGV("%s: Queuing buffer to buffer queue %p slot %d returns %d",
-            __FUNCTION__, output.get(), slot, res);
-    //During buffer queue 'mMutex' is not held which makes the removal of
-    //"output" possible. Check whether this is the case and return.
-    if (mOutputSlots[output] == nullptr) {
+    SP_LOGV("%s: Queuing buffer to buffer queue %p bufferId %" PRIu64 " returns %d", __FUNCTION__,
+            output.get(), bufferId, res);
+    // During buffer queue 'mMutex' is not held which makes the removal of
+    // "output" possible. Check whether this is the case and return.
+    if (mOutputSurfaces[surfaceId] == nullptr) {
         return res;
     }
     if (res != OK) {
@@ -407,7 +407,7 @@
     // If the queued buffer replaces a pending buffer in the async
     // queue, no onBufferReleased is called by the buffer queue.
     // Proactively trigger the callback to avoid buffer loss.
-    if (queueOutput.bufferReplaced) {
+    if (queueBufferOutput.bufferReplaced) {
         onBufferReplacedLocked(output, surfaceId);
     }
 
@@ -445,52 +445,32 @@
     auto tracker = std::make_unique<BufferTracker>(gb, surface_ids);
 
     for (auto& surface_id : surface_ids) {
-        sp<IGraphicBufferProducer>& gbp = mOutputs[surface_id];
-        if (gbp.get() == nullptr) {
+        sp<Surface>& surface = mOutputSurfaces[surface_id];
+        if (surface.get() == nullptr) {
             //Output surface was likely removed by the client.
             continue;
         }
-        int slot = getSlotForOutputLocked(gbp, gb);
-        if (slot != BufferItem::INVALID_BUFFER_SLOT) {
-            //Buffer is already attached to this output surface.
-            continue;
-        }
+
         //Temporarily unlock the mutex when trying to attachBuffer to the output
         //queue, because attachBuffer could block in case of a slow consumer. If
         //we block while holding the lock, onFrameAvailable and onBufferReleased
         //will block as well because they need to acquire the same lock.
         mMutex.unlock();
-        res = gbp->attachBuffer(&slot, gb);
+        res = surface->attachBuffer(anb);
         mMutex.lock();
         if (res != OK) {
-            SP_LOGE("%s: Cannot attachBuffer from GraphicBufferProducer %p: %s (%d)",
-                    __FUNCTION__, gbp.get(), strerror(-res), res);
+            SP_LOGE("%s: Cannot attachBuffer to output Surface %p: %s (%d)", __FUNCTION__,
+                    surface.get(), strerror(-res), res);
             // TODO: might need to detach/cleanup the already attached buffers before return?
             return res;
         }
-        if ((slot < 0) || (slot > BufferQueue::NUM_BUFFER_SLOTS)) {
-            SP_LOGE("%s: Slot received %d either bigger than expected maximum %d or negative!",
-                    __FUNCTION__, slot, BufferQueue::NUM_BUFFER_SLOTS);
-            return BAD_VALUE;
-        }
         //During buffer attach 'mMutex' is not held, which makes the removal of
         //the output surface possible. Check whether this is the case and continue.
-        if (mOutputSlots[gbp] == nullptr) {
+        if (mHeldBuffers[surface] == nullptr) {
             continue;
         }
-        auto& outputSlots = *mOutputSlots[gbp];
-        if (static_cast<size_t> (slot + 1) > outputSlots.size()) {
-            outputSlots.resize(slot + 1);
-        }
-        if (outputSlots[slot] != nullptr) {
-            // If the buffer is attached to a slot which already contains a buffer,
-            // the previous buffer will be removed from the output queue. Decrement
-            // the reference count accordingly.
-            decrementBufRefCountLocked(outputSlots[slot]->getId(), surface_id);
-        }
-        SP_LOGV("%s: Attached buffer %p to slot %d on output %p.",__FUNCTION__, gb.get(),
-                slot, gbp.get());
-        outputSlots[slot] = gb;
+        mHeldBuffers[surface]->insert(gb);
+        SP_LOGV("%s: Attached buffer %p on output %p.", __FUNCTION__, gb.get(), surface.get());
     }
 
     mBuffers[bufferId] = std::move(tracker);
@@ -504,25 +484,14 @@
 
     // Acquire and detach the buffer from the input
     BufferItem bufferItem;
-    status_t res = mConsumer->acquireBuffer(&bufferItem, /* presentWhen */ 0);
+    status_t res = mBufferItemConsumer->acquireBuffer(&bufferItem, /* presentWhen */ 0);
     if (res != NO_ERROR) {
         SP_LOGE("%s: Acquiring buffer from input failed (%d)", __FUNCTION__, res);
         mOnFrameAvailableRes.store(res);
         return;
     }
 
-    uint64_t bufferId;
-    if (bufferItem.mGraphicBuffer != nullptr) {
-        mInputSlots[bufferItem.mSlot] = bufferItem;
-    } else if (bufferItem.mAcquireCalled) {
-        bufferItem.mGraphicBuffer = mInputSlots[bufferItem.mSlot].mGraphicBuffer;
-        mInputSlots[bufferItem.mSlot].mFrameNumber = bufferItem.mFrameNumber;
-    } else {
-        SP_LOGE("%s: Invalid input graphic buffer!", __FUNCTION__);
-        mOnFrameAvailableRes.store(BAD_VALUE);
-        return;
-    }
-    bufferId = bufferItem.mGraphicBuffer->getId();
+    uint64_t bufferId = bufferItem.mGraphicBuffer->getId();
 
     if (mBuffers.find(bufferId) == mBuffers.end()) {
         SP_LOGE("%s: Acquired buffer doesn't exist in attached buffer map",
@@ -545,13 +514,12 @@
     SP_LOGV("%s: BufferTracker for buffer %" PRId64 ", number of requests %zu",
            __FUNCTION__, bufferItem.mGraphicBuffer->getId(), tracker.requestedSurfaces().size());
     for (const auto id : tracker.requestedSurfaces()) {
-
-        if (mOutputs[id] == nullptr) {
+        if (mOutputSurfaces[id] == nullptr) {
             //Output surface was likely removed by the client.
             continue;
         }
 
-        res = outputBufferLocked(mOutputs[id], bufferItem, id);
+        res = outputBufferLocked(mOutputSurfaces[id], bufferItem, id);
         if (res != OK) {
             SP_LOGE("%s: outputBufferLocked failed %d", __FUNCTION__, res);
             mOnFrameAvailableRes.store(res);
@@ -590,26 +558,11 @@
     mBuffers.erase(id);
 
     uint64_t bufferId = tracker_ptr->getBuffer()->getId();
-    int consumerSlot = -1;
-    uint64_t frameNumber;
-    auto inputSlot = mInputSlots.begin();
-    for (; inputSlot != mInputSlots.end(); inputSlot++) {
-        if (inputSlot->second.mGraphicBuffer->getId() == bufferId) {
-            consumerSlot = inputSlot->second.mSlot;
-            frameNumber = inputSlot->second.mFrameNumber;
-            break;
-        }
-    }
-    if (consumerSlot == -1) {
-        SP_LOGE("%s: Buffer missing inside input slots!", __FUNCTION__);
-        return;
-    }
 
     auto detachBuffer = mDetachedBuffers.find(bufferId);
     bool detach = (detachBuffer != mDetachedBuffers.end());
     if (detach) {
         mDetachedBuffers.erase(detachBuffer);
-        mInputSlots.erase(inputSlot);
     }
     // Temporarily unlock mutex to avoid circular lock:
     // 1. This function holds splitter lock, calls releaseBuffer which triggers
@@ -618,15 +571,14 @@
     // 2. Camera3SharedOutputStream::getBufferLocked calls
     // attachBufferToOutputs, which holds the stream lock, and waits for the
     // splitter lock.
-    sp<IGraphicBufferConsumer> consumer(mConsumer);
     mMutex.unlock();
     int res = NO_ERROR;
-    if (consumer != nullptr) {
+    if (mBufferItemConsumer != nullptr) {
         if (detach) {
-            res = consumer->detachBuffer(consumerSlot);
+            res = mBufferItemConsumer->detachBuffer(tracker_ptr->getBuffer());
         } else {
-            res = consumer->releaseBuffer(consumerSlot, frameNumber,
-                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, tracker_ptr->getMergedFence());
+            res = mBufferItemConsumer->releaseBuffer(tracker_ptr->getBuffer(),
+                                                     tracker_ptr->getMergedFence());
         }
     } else {
         SP_LOGE("%s: consumer has become null!", __FUNCTION__);
@@ -648,23 +600,25 @@
     }
 }
 
-void Camera3StreamSplitter::onBufferReleasedByOutput(
-        const sp<IGraphicBufferProducer>& from) {
+void Camera3StreamSplitter::onBufferReleasedByOutput(const sp<Surface>& from) {
     ATRACE_CALL();
-    sp<Fence> fence;
 
-    int slot = BufferItem::INVALID_BUFFER_SLOT;
-    auto res = from->dequeueBuffer(&slot, &fence, mWidth, mHeight, mFormat, mProducerUsage,
-            nullptr, nullptr);
+    from->setBuffersDimensions(mWidth, mHeight);
+    from->setBuffersFormat(mFormat);
+    from->setUsage(mProducerUsage);
+
+    sp<GraphicBuffer> buffer;
+    sp<Fence> fence;
+    auto res = from->dequeueBuffer(&buffer, &fence);
     Mutex::Autolock lock(mMutex);
-    handleOutputDequeueStatusLocked(res, slot);
+    handleOutputDequeueStatusLocked(res, buffer);
     if (res != OK) {
         return;
     }
 
     size_t surfaceId = 0;
     bool found = false;
-    for (const auto& it : mOutputs) {
+    for (const auto& it : mOutputSurfaces) {
         if (it.second == from) {
             found = true;
             surfaceId = it.first;
@@ -676,36 +630,29 @@
         return;
     }
 
-    returnOutputBufferLocked(fence, from, surfaceId, slot);
+    returnOutputBufferLocked(fence, from, surfaceId, buffer);
 }
 
-void Camera3StreamSplitter::onBufferReplacedLocked(
-        const sp<IGraphicBufferProducer>& from, size_t surfaceId) {
+void Camera3StreamSplitter::onBufferReplacedLocked(const sp<Surface>& from, size_t surfaceId) {
     ATRACE_CALL();
-    sp<Fence> fence;
 
-    int slot = BufferItem::INVALID_BUFFER_SLOT;
-    auto res = from->dequeueBuffer(&slot, &fence, mWidth, mHeight, mFormat, mProducerUsage,
-            nullptr, nullptr);
-    handleOutputDequeueStatusLocked(res, slot);
+    from->setBuffersDimensions(mWidth, mHeight);
+    from->setBuffersFormat(mFormat);
+    from->setUsage(mProducerUsage);
+
+    sp<GraphicBuffer> buffer;
+    sp<Fence> fence;
+    auto res = from->dequeueBuffer(&buffer, &fence);
+    handleOutputDequeueStatusLocked(res, buffer);
     if (res != OK) {
         return;
     }
 
-    returnOutputBufferLocked(fence, from, surfaceId, slot);
+    returnOutputBufferLocked(fence, from, surfaceId, buffer);
 }
 
 void Camera3StreamSplitter::returnOutputBufferLocked(const sp<Fence>& fence,
-        const sp<IGraphicBufferProducer>& from, size_t surfaceId, int slot) {
-    sp<GraphicBuffer> buffer;
-
-    if (mOutputSlots[from] == nullptr) {
-        //Output surface got likely removed by client.
-        return;
-    }
-
-    auto outputSlots = *mOutputSlots[from];
-    buffer = outputSlots[slot];
+        const sp<Surface>& from, size_t surfaceId, const sp<GraphicBuffer>& buffer) {
     BufferTracker& tracker = *(mBuffers[buffer->getId()]);
     // Merge the release fence of the incoming buffer so that the fence we send
     // back to the input includes all of the outputs' fences
@@ -716,9 +663,16 @@
     auto detachBuffer = mDetachedBuffers.find(buffer->getId());
     bool detach = (detachBuffer != mDetachedBuffers.end());
     if (detach) {
-        auto res = from->detachBuffer(slot);
+        auto res = from->detachBuffer(buffer);
         if (res == NO_ERROR) {
-            outputSlots[slot] = nullptr;
+            if (mHeldBuffers.contains(from)) {
+                mHeldBuffers[from]->erase(buffer);
+            } else {
+                uint64_t surfaceId = 0;
+                from->getUniqueId(&surfaceId);
+                SP_LOGW("%s: buffer %" PRIu64 " not found in held buffers of surface %" PRIu64,
+                        __FUNCTION__, buffer->getId(), surfaceId);
+            }
         } else {
             SP_LOGE("%s: detach buffer from output failed (%d)", __FUNCTION__, res);
         }
@@ -728,22 +682,17 @@
     decrementBufRefCountLocked(buffer->getId(), surfaceId);
 }
 
-void Camera3StreamSplitter::handleOutputDequeueStatusLocked(status_t res, int slot) {
+void Camera3StreamSplitter::handleOutputDequeueStatusLocked(status_t res,
+        const sp<GraphicBuffer>& buffer) {
     if (res == NO_INIT) {
         // If we just discovered that this output has been abandoned, note that,
         // but we can't do anything else, since buffer is invalid
         onAbandonedLocked();
-    } else if (res == IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION) {
-        SP_LOGE("%s: Producer needs to re-allocate buffer!", __FUNCTION__);
-        SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
-    } else if (res == IGraphicBufferProducer::RELEASE_ALL_BUFFERS) {
-        SP_LOGE("%s: All slot->buffer mapping should be released!", __FUNCTION__);
-        SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
     } else if (res == NO_MEMORY) {
         SP_LOGE("%s: No free buffers", __FUNCTION__);
     } else if (res == WOULD_BLOCK) {
         SP_LOGE("%s: Dequeue call will block", __FUNCTION__);
-    } else if (res != OK || (slot == BufferItem::INVALID_BUFFER_SLOT)) {
+    } else if (res != OK || buffer == nullptr) {
         SP_LOGE("%s: dequeue buffer from output failed (%d)", __FUNCTION__, res);
     }
 }
@@ -762,36 +711,20 @@
     SP_LOGV("One of my outputs has abandoned me");
 }
 
-int Camera3StreamSplitter::getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
-        const sp<GraphicBuffer>& gb) {
-    auto& outputSlots = *mOutputSlots[gbp];
-
-    for (size_t i = 0; i < outputSlots.size(); i++) {
-        if (outputSlots[i] == gb) {
-            return (int)i;
-        }
-    }
-
-    SP_LOGV("%s: Cannot find slot for gb %p on output %p", __FUNCTION__, gb.get(),
-            gbp.get());
-    return BufferItem::INVALID_BUFFER_SLOT;
-}
-
-Camera3StreamSplitter::OutputListener::OutputListener(
-        wp<Camera3StreamSplitter> splitter,
-        wp<IGraphicBufferProducer> output)
-      : mSplitter(splitter), mOutput(output) {}
+Camera3StreamSplitter::OutputListener::OutputListener(wp<Camera3StreamSplitter> splitter,
+        wp<Surface> output)
+    : mSplitter(splitter), mOutput(output) {}
 
 void Camera3StreamSplitter::OutputListener::onBufferReleased() {
     ATRACE_CALL();
     sp<Camera3StreamSplitter> splitter = mSplitter.promote();
-    sp<IGraphicBufferProducer> output = mOutput.promote();
+    sp<Surface> output = mOutput.promote();
     if (splitter != nullptr && output != nullptr) {
         splitter->onBufferReleasedByOutput(output);
     }
 }
 
-void Camera3StreamSplitter::OutputListener::binderDied(const wp<IBinder>& /* who */) {
+void Camera3StreamSplitter::OutputListener::onRemoteDied() {
     sp<Camera3StreamSplitter> splitter = mSplitter.promote();
     if (splitter != nullptr) {
         Mutex::Autolock lock(splitter->mMutex);
@@ -822,3 +755,5 @@
 }
 
 } // namespace android
+
+#endif  // USE_NEW_STREAM_SPLITTER
\ No newline at end of file
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index 1feb4a0..0440e08 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -14,22 +14,25 @@
  * limitations under the License.
  */
 
-#ifndef ANDROID_SERVERS_STREAMSPLITTER_H
-#define ANDROID_SERVERS_STREAMSPLITTER_H
+#pragma once
 
+#include <memory>
 #include <unordered_set>
 
 #include <camera/CameraMetadata.h>
 
-#include <gui/IConsumerListener.h>
-#include <gui/IProducerListener.h>
 #include <gui/BufferItemConsumer.h>
+#include <gui/Surface.h>
 
 #include <utils/Condition.h>
 #include <utils/Mutex.h>
 #include <utils/StrongPointer.h>
 #include <utils/Timers.h>
 
+#include "Flags.h"
+
+#if USE_NEW_STREAM_SPLITTER  // Gate the whole file; guarding each individual change would be a huge hassle.
+
 #define SP_LOGV(x, ...) ALOGV("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
 #define SP_LOGI(x, ...) ALOGI("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
 #define SP_LOGW(x, ...) ALOGW("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
@@ -38,8 +41,6 @@
 namespace android {
 
 class GraphicBuffer;
-class IGraphicBufferConsumer;
-class IGraphicBufferProducer;
 
 // Camera3StreamSplitter is an autonomous class that manages one input BufferQueue
 // and multiple output BufferQueues. By using the buffer attach and detach logic
@@ -47,9 +48,8 @@
 // BufferQueue, where each buffer queued to the input is available to be
 // acquired by each of the outputs, and is able to be dequeued by the input
 // again only once all of the outputs have released it.
-class Camera3StreamSplitter : public BnConsumerListener {
-public:
-
+class Camera3StreamSplitter : public BufferItemConsumer::FrameAvailableListener {
+  public:
     // Constructor
     Camera3StreamSplitter(bool useHalBufManager = false);
 
@@ -67,7 +67,7 @@
     //
     // A return value other than NO_ERROR means that an error has occurred and
     // outputQueue has not been added to the splitter. BAD_VALUE is returned if
-    // outputQueue is NULL. See IGraphicBufferProducer::connect for explanations
+    // outputQueue is NULL. See Surface::connect for explanations
     // of other error codes.
     status_t addOutput(size_t surfaceId, const sp<Surface>& outputQueue);
 
@@ -97,7 +97,7 @@
     void setHalBufferManager(bool enabled);
 
 private:
-    // From IConsumerListener
+    // From BufferItemConsumer::FrameAvailableListener
     //
     // During this callback, we store some tracking information, detach the
     // buffer from the input, and attach it to each of the outputs. This call
@@ -106,23 +106,13 @@
     // input.
     void onFrameAvailable(const BufferItem& item) override;
 
-    // From IConsumerListener
+    // From BufferItemConsumer::FrameAvailableListener
     //
     // Similar to onFrameAvailable, but buffer item is indeed replacing a buffer
     // in the buffer queue. This can happen when buffer queue is in droppable
     // mode.
     void onFrameReplaced(const BufferItem& item) override;
 
-    // From IConsumerListener
-    // We don't care about released buffers because we detach each buffer as
-    // soon as we acquire it. See the comment for onBufferReleased below for
-    // some clarifying notes about the name.
-    void onBuffersReleased() override {}
-
-    // From IConsumerListener
-    // We don't care about sideband streams, since we won't be splitting them
-    void onSidebandStreamChanged() override {}
-
     // This is the implementation of the onBufferReleased callback from
     // IProducerListener. It gets called from an OutputListener (see below), and
     // 'from' is which producer interface from which the callback was received.
@@ -132,10 +122,10 @@
     // last output releasing the buffer, and if so, release it to the input.
     // If we release the buffer to the input, we allow a blocked
     // onFrameAvailable call to proceed.
-    void onBufferReleasedByOutput(const sp<IGraphicBufferProducer>& from);
+    void onBufferReleasedByOutput(const sp<Surface>& from);
 
     // Called by outputBufferLocked when a buffer in the async buffer queue got replaced.
-    void onBufferReplacedLocked(const sp<IGraphicBufferProducer>& from, size_t surfaceId);
+    void onBufferReplacedLocked(const sp<Surface>& from, size_t surfaceId);
 
     // When this is called, the splitter disconnects from (i.e., abandons) its
     // input queue and signals any waiting onFrameAvailable calls to wake up.
@@ -149,32 +139,32 @@
     void decrementBufRefCountLocked(uint64_t id, size_t surfaceId);
 
     // Check for and handle any output surface dequeue errors.
-    void handleOutputDequeueStatusLocked(status_t res, int slot);
+    void handleOutputDequeueStatusLocked(status_t res, const sp<GraphicBuffer>& buffer);
 
     // Handles released output surface buffers.
-    void returnOutputBufferLocked(const sp<Fence>& fence, const sp<IGraphicBufferProducer>& from,
-            size_t surfaceId, int slot);
+    void returnOutputBufferLocked(const sp<Fence>& fence, const sp<Surface>& from, size_t surfaceId,
+            const sp<GraphicBuffer>& buffer);
 
     // This is a thin wrapper class that lets us determine which BufferQueue
     // the IProducerListener::onBufferReleased callback is associated with. We
     // create one of these per output BufferQueue, and then pass the producer
     // into onBufferReleasedByOutput above.
-    class OutputListener : public BnProducerListener,
-                           public IBinder::DeathRecipient {
-    public:
-        OutputListener(wp<Camera3StreamSplitter> splitter,
-                wp<IGraphicBufferProducer> output);
+    class OutputListener : public SurfaceListener {
+      public:
+        OutputListener(wp<Camera3StreamSplitter> splitter, wp<Surface> output);
         virtual ~OutputListener() = default;
 
-        // From IProducerListener
+        // From SurfaceListener
         void onBufferReleased() override;
+        bool needsReleaseNotify() override { return true; };
+        void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>&) override {}
+        void onBufferDetached(int /*slot*/) override {}
 
-        // From IBinder::DeathRecipient
-        void binderDied(const wp<IBinder>& who) override;
+        void onRemoteDied() override;
 
     private:
         wp<Camera3StreamSplitter> mSplitter;
-        wp<IGraphicBufferProducer> mOutput;
+        wp<Surface> mOutput;
     };
 
     class BufferTracker {
@@ -195,7 +185,6 @@
         const std::vector<size_t> requestedSurfaces() const { return mRequestedSurfaces; }
 
     private:
-
         // Disallow copying
         BufferTracker(const BufferTracker& other);
         BufferTracker& operator=(const BufferTracker& other);
@@ -220,16 +209,12 @@
     // Send a buffer to particular output, and increment the reference count
     // of the buffer. If this output is abandoned, the buffer's reference count
     // won't be incremented.
-    status_t outputBufferLocked(const sp<IGraphicBufferProducer>& output,
-            const BufferItem& bufferItem, size_t surfaceId);
+    status_t outputBufferLocked(const sp<Surface>& output, const BufferItem& bufferItem,
+            size_t surfaceId);
 
     // Get unique name for the buffer queue consumer
     std::string getUniqueConsumerName();
 
-    // Helper function to get the BufferQueue slot where a particular buffer is attached to.
-    int getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
-            const sp<GraphicBuffer>& gb);
-
     // Sum of max consumer buffers for all outputs
     size_t mMaxConsumerBuffers = 0;
     size_t mMaxHalBuffers = 0;
@@ -246,17 +231,9 @@
 
     Mutex mMutex;
 
-    sp<IGraphicBufferProducer> mProducer;
-    sp<IGraphicBufferConsumer> mConsumer;
     sp<BufferItemConsumer> mBufferItemConsumer;
     sp<Surface> mSurface;
 
-    //Map graphic buffer ids -> buffer items
-    std::unordered_map<uint64_t, BufferItem> mInputSlots;
-
-    //Map surface ids -> gbp outputs
-    std::unordered_map<int, sp<IGraphicBufferProducer> > mOutputs;
-
     //Map surface ids -> gbp outputs
     std::unordered_map<int, sp<Surface>> mOutputSurfaces;
 
@@ -268,18 +245,22 @@
     // buffer, but also contain merged release fences).
     std::unordered_map<uint64_t, std::unique_ptr<BufferTracker> > mBuffers;
 
-    struct GBPHash {
-        std::size_t operator()(const sp<IGraphicBufferProducer>& producer) const {
-            return std::hash<IGraphicBufferProducer *>{}(producer.get());
+    struct SurfaceHash {
+        std::size_t operator()(const sp<Surface>& producer) const {
+            return std::hash<Surface*>{}(producer.get());
         }
     };
 
-    std::unordered_map<sp<IGraphicBufferProducer>, sp<OutputListener>,
-            GBPHash> mNotifiers;
+    struct BufferHash {
+        std::size_t operator()(const sp<GraphicBuffer>& buffer) const {
+            return std::hash<GraphicBuffer*>{}(buffer.get());
+        }
+    };
 
-    typedef std::vector<sp<GraphicBuffer>> OutputSlots;
-    std::unordered_map<sp<IGraphicBufferProducer>, std::unique_ptr<OutputSlots>,
-            GBPHash> mOutputSlots;
+    std::unordered_map<sp<Surface>, sp<OutputListener>, SurfaceHash> mNotifiers;
+
+    typedef std::unordered_set<sp<GraphicBuffer>, BufferHash> HeldBuffers;
+    std::unordered_map<sp<Surface>, std::unique_ptr<HeldBuffers>, SurfaceHash> mHeldBuffers;
 
     //A set of buffers that could potentially stay in some of the outputs after removal
     //and therefore should be detached from the input queue.
@@ -298,4 +279,4 @@
 
 } // namespace android
 
-#endif
+#endif  // USE_NEW_STREAM_SPLITTER
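
Editorial aside, not part of the patch: the SurfaceHash/BufferHash functors above key unordered containers on strong pointers by hashing the raw pointer value, so two sp<> handles to the same object land in the same bucket. A minimal standalone sketch of the same pattern, using std::shared_ptr in place of android::sp (all names here are illustrative):

#include <cstddef>
#include <memory>
#include <unordered_map>
#include <unordered_set>

struct Surface {};        // stand-in for android::Surface
struct GraphicBuffer {};  // stand-in for android::GraphicBuffer

// Hash a shared_ptr by the address of the object it owns.
template <typename T>
struct PtrHash {
    std::size_t operator()(const std::shared_ptr<T>& p) const {
        return std::hash<T*>{}(p.get());
    }
};

using HeldBuffers = std::unordered_set<std::shared_ptr<GraphicBuffer>, PtrHash<GraphicBuffer>>;

// One set of currently-held buffers per output surface, mirroring mHeldBuffers above.
std::unordered_map<std::shared_ptr<Surface>, std::unique_ptr<HeldBuffers>, PtrHash<Surface>>
        heldBuffers;
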
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index 9a3f433..9a4e9e3 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -43,9 +43,7 @@
             kResultPointsToCorrectNoClamp.begin(),
             kResultPointsToCorrectNoClamp.end());
     mRemappedKeys.insert(ANDROID_DISTORTION_CORRECTION_MODE);
-    if (flags::concert_mode()) {
-        mRemappedKeys.insert(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION);
-    }
+    mRemappedKeys.insert(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION);
 }
 
 bool DistortionMapper::isDistortionSupported(const CameraMetadata &deviceInfo) {
diff --git a/services/camera/libcameraservice/device3/Flags.h b/services/camera/libcameraservice/device3/Flags.h
new file mode 100644
index 0000000..ca0006b
--- /dev/null
+++ b/services/camera/libcameraservice/device3/Flags.h
@@ -0,0 +1,27 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <com_android_graphics_libgui_flags.h>
+
+#ifndef USE_NEW_STREAM_SPLITTER
+
+#define USE_NEW_STREAM_SPLITTER                              \
+    COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_STREAM_SPLITTER) && \
+            COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_PLATFORM_API_IMPROVEMENTS)
+
+#endif  // USE_NEW_STREAM_SPLITTER
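
Editorial aside, not part of the patch: USE_NEW_STREAM_SPLITTER is an ordinary preprocessor gate, so call sites can pick an implementation at compile time. A hypothetical sketch of how a consumer of Flags.h might use it; the include paths and the type alias are illustrative only:

#include "Flags.h"

#if USE_NEW_STREAM_SPLITTER
#include "Camera3StreamSplitter.h"
namespace android { using StreamSplitterT = Camera3StreamSplitter; }
#else
#include "deprecated/DeprecatedCamera3StreamSplitter.h"
namespace android { using StreamSplitterT = DeprecatedCamera3StreamSplitter; }
#endif  // USE_NEW_STREAM_SPLITTER
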
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index 83caa00..a04406e 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -18,10 +18,16 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <com_android_internal_camera_flags.h>
+
 #include <utils/Log.h>
 
 #include "PreviewFrameSpacer.h"
 #include "Camera3OutputStream.h"
+#include "utils/SchedulingPolicyUtils.h"
+#include "utils/Utils.h"
+
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 
@@ -129,6 +135,24 @@
     mLastCameraReadoutTime = bufferHolder.readoutTimestamp;
 }
 
+status_t PreviewFrameSpacer::run(const char* name, int32_t priority, size_t stack) {
+    auto ret = Thread::run(name, priority, stack);
+    if (flags::bump_preview_frame_space_priority()) {
+        // Boost priority of the preview frame spacer thread to SCHED_FIFO.
+        pid_t previewFrameSpacerTid = getTid();
+        auto res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), previewFrameSpacerTid,
+                RunThreadWithRealtimePriority::kRequestThreadPriority);
+        if (res != OK) {
+            ALOGW("Can't set realtime priority for preview frame spacer thread: %s (%d)",
+                    strerror(-res), res);
+        } else {
+            ALOGV("Set real time priority for preview frame spacer thread (tid %d)",
+                    previewFrameSpacerTid);
+        }
+    }
+    return ret;
+}
+
 }; // namespace camera3
 
 }; // namespace android
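
Editorial aside, not part of the patch: the run() override above asks SchedulingPolicyUtils to bump the frame spacer thread to a real-time scheduling class once the thread is running. For readers unfamiliar with that helper, here is a rough standalone illustration of promoting the current thread to SCHED_FIFO with plain POSIX APIs; this is only the general mechanism, not what requestPriorityDirect does internally:

#include <pthread.h>
#include <sched.h>

// Returns true on success. Typically requires CAP_SYS_NICE or an RLIMIT_RTPRIO
// grant; unprivileged callers will usually get EPERM.
static bool promoteCurrentThreadToFifo(int rtPriority) {
    sched_param param{};
    param.sched_priority = rtPriority;  // e.g. 1 for a modest real-time priority
    return pthread_setschedparam(pthread_self(), SCHED_FIFO, &param) == 0;
}
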
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index f46de3d..ab85189 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -58,6 +58,7 @@
 
     bool threadLoop() override;
     void requestExit() override;
+    status_t run(const char* name, int32_t priority = PRIORITY_DEFAULT, size_t stack = 0) override;
 
   private:
     // structure holding cached preview buffer info
diff --git a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
index 8bb22a9..1ea077c 100644
--- a/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
+++ b/services/camera/libcameraservice/device3/RotateAndCropMapper.cpp
@@ -37,9 +37,7 @@
 
     mRemappedKeys.insert(ANDROID_SCALER_ROTATE_AND_CROP);
     mRemappedKeys.insert(ANDROID_SCALER_CROP_REGION);
-    if (flags::concert_mode()) {
-        mRemappedKeys.insert(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION);
-    }
+    mRemappedKeys.insert(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION);
 }
 
 bool RotateAndCropMapper::isNeeded(const CameraMetadata* deviceInfo) {
diff --git a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
index 1f52e9b..2016284 100644
--- a/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
+++ b/services/camera/libcameraservice/device3/ZoomRatioMapper.cpp
@@ -38,9 +38,7 @@
             kResultPointsToCorrectNoClamp.end());
 
     mRemappedKeys.insert(ANDROID_CONTROL_ZOOM_RATIO);
-    if (flags::concert_mode()) {
-        mRemappedKeys.insert(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION);
-    }
+    mRemappedKeys.insert(ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION);
 }
 
 status_t ZoomRatioMapper::initZoomRatioInTemplate(CameraMetadata *request) {
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
index 7f30f5e..e52e9a2 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.cpp
@@ -173,10 +173,10 @@
 AidlCamera3Device::AidlCamera3Device(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-        const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+        const std::string& id, bool overrideForPerfClass, int rotationOverride,
         bool legacyClient) :
         Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
-                overrideForPerfClass, overrideToPortrait, legacyClient) {
+                overrideForPerfClass, rotationOverride, legacyClient) {
     mCallbacks = ndk::SharedRefBase::make<AidlCameraDeviceCallbacks>(this);
 }
 
@@ -207,7 +207,7 @@
       return INVALID_OPERATION;
     }
     res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
-            mOverrideToPortrait);
+            mRotationOverride);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -223,7 +223,7 @@
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
                     physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
-                    mOverrideToPortrait);
+                    mRotationOverride);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -349,7 +349,7 @@
                 readoutSupported.data.u8[0] == ANDROID_SENSOR_READOUT_TIMESTAMP_HARDWARE;
     }
 
-    return initializeCommonLocked();
+    return initializeCommonLocked(manager);
 }
 
 ::ndk::ScopedAStatus AidlCamera3Device::AidlCameraDeviceCallbacks::processCaptureResult(
@@ -417,7 +417,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
-        mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
+        mRotationOverride, mActivePhysicalId}, mResultMetadataQueue
     };
 
     for (const auto& result : results) {
@@ -459,7 +459,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, *(mInterface), mLegacyClient, mMinExpectedDuration, mIsFixedFps,
-        mOverrideToPortrait, mActivePhysicalId}, mResultMetadataQueue
+        mRotationOverride, mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg, mSensorReadoutTimestampSupported);
@@ -918,12 +918,6 @@
         camera3::camera_stream_t *src = config->streams[i];
 
         Camera3Stream* cam3stream = Camera3Stream::cast(src);
-        // For stream configurations with multi res streams, hal buffer manager has to be used.
-        if (!flags::session_hal_buf_manager() && cam3stream->getHalStreamGroupId() != -1 &&
-                src->stream_type != CAMERA_STREAM_INPUT) {
-            mUseHalBufManager = true;
-            config->use_hal_buf_manager = true;
-        }
         cam3stream->setBufferFreedListener(this);
         int streamId = cam3stream->getId();
         StreamType streamType;
@@ -1002,8 +996,7 @@
               err.getMessage());
         return AidlProviderInfo::mapToStatusT(err);
     }
-    if (flags::session_hal_buf_manager() && interfaceVersion >= AIDL_DEVICE_SESSION_V3
-            && mSupportSessionHalBufManager) {
+    if (interfaceVersion >= AIDL_DEVICE_SESSION_V3 && mSupportSessionHalBufManager) {
         err = mAidlSession->configureStreamsV2(requestedConfiguration, &configureStreamsRet);
         finalConfiguration = std::move(configureStreamsRet.halStreams);
     } else {
@@ -1015,18 +1008,16 @@
         return AidlProviderInfo::mapToStatusT(err);
     }
 
-    if (flags::session_hal_buf_manager()) {
-        std::set<int32_t> halBufferManagedStreamIds;
-        for (const auto &halStream: finalConfiguration) {
-            if ((interfaceVersion >= AIDL_DEVICE_SESSION_V3 &&
-                    mSupportSessionHalBufManager && halStream.enableHalBufferManager)
-                    || mUseHalBufManager) {
-                halBufferManagedStreamIds.insert(halStream.id);
-            }
+    std::set<int32_t> halBufferManagedStreamIds;
+    for (const auto &halStream: finalConfiguration) {
+        if ((interfaceVersion >= AIDL_DEVICE_SESSION_V3 &&
+                mSupportSessionHalBufManager && halStream.enableHalBufferManager)
+                || mUseHalBufManager) {
+            halBufferManagedStreamIds.insert(halStream.id);
         }
-        mHalBufManagedStreamIds = std::move(halBufferManagedStreamIds);
-        config->hal_buffer_managed_streams = mHalBufManagedStreamIds;
     }
+    mHalBufManagedStreamIds = std::move(halBufferManagedStreamIds);
+    config->hal_buffer_managed_streams = mHalBufManagedStreamIds;
     // And convert output stream configuration from AIDL
     for (size_t i = 0; i < config->num_streams; i++) {
         camera3::camera_stream_t *dst = config->streams[i];
@@ -1096,10 +1087,8 @@
             }
             dstStream->setUsage(
                     mapProducerToFrameworkUsage(src.producerUsage));
-            if (flags::session_hal_buf_manager()) {
-                dstStream->setHalBufferManager(
-                        contains(config->hal_buffer_managed_streams, streamId));
-            }
+            dstStream->setHalBufferManager(
+                    contains(config->hal_buffer_managed_streams, streamId));
         }
         dst->max_buffers = src.maxBuffers;
     }
@@ -1480,10 +1469,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys,
-                  useHalBufManager, supportCameraMute, overrideToPortrait,
+                  useHalBufManager, supportCameraMute, rotationOverride,
                   supportSettingsOverride) {}
 
 status_t AidlCamera3Device::AidlRequestThread::switchToOffline(
@@ -1714,10 +1703,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) {
     return new AidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-            useHalBufManager, supportCameraMute, overrideToPortrait,
+            useHalBufManager, supportCameraMute, rotationOverride,
             supportSettingsOverride);
 };
 
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
index ac29bbc..abc3f9c 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3Device.h
@@ -42,7 +42,7 @@
     explicit AidlCamera3Device(
             std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
             std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-            const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+            const std::string& id, bool overrideForPerfClass, int rotationOverride,
             bool legacyClient = false);
 
     virtual ~AidlCamera3Device() { }
@@ -184,7 +184,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride);
 
         status_t switchToOffline(
@@ -275,7 +275,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
index f8308df..cc32c2a 100644
--- a/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/aidl/AidlCamera3OfflineSession.cpp
@@ -29,6 +29,7 @@
 
 #include <utils/Trace.h>
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <android/binder_ibinder_platform.h>
 #include <camera/StringUtils.h>
@@ -127,7 +128,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -174,7 +175,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this,
         *this, mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg, mSensorReadoutTimestampSupported);
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
new file mode 100644
index 0000000..c1113e5
--- /dev/null
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.cpp
@@ -0,0 +1,820 @@
+/*
+ * Copyright 2014,2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <inttypes.h>
+
+#define LOG_TAG "DeprecatedCamera3StreamSplitter"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+// #define LOG_NDEBUG 0
+
+#include <camera/StringUtils.h>
+#include <gui/BufferItem.h>
+#include <gui/BufferQueue.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+
+#include <ui/GraphicBuffer.h>
+
+#include <binder/ProcessState.h>
+
+#include <utils/Trace.h>
+
+#include <cutils/atomic.h>
+
+#include "../Camera3Stream.h"
+
+#include "DeprecatedCamera3StreamSplitter.h"
+
+namespace android {
+
+status_t DeprecatedCamera3StreamSplitter::connect(
+        const std::unordered_map<size_t, sp<Surface>>& surfaces, uint64_t consumerUsage,
+        uint64_t producerUsage, size_t halMaxBuffers, uint32_t width, uint32_t height,
+        android::PixelFormat format, sp<Surface>* consumer, int64_t dynamicRangeProfile) {
+    ATRACE_CALL();
+    if (consumer == nullptr) {
+        SP_LOGE("%s: consumer pointer is NULL", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    Mutex::Autolock lock(mMutex);
+    status_t res = OK;
+
+    if (mOutputs.size() > 0 || mConsumer != nullptr) {
+        SP_LOGE("%s: already connected", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    if (mBuffers.size() > 0) {
+        SP_LOGE("%s: still has %zu pending buffers", __FUNCTION__, mBuffers.size());
+        return BAD_VALUE;
+    }
+
+    mMaxHalBuffers = halMaxBuffers;
+    mConsumerName = getUniqueConsumerName();
+    mDynamicRangeProfile = dynamicRangeProfile;
+    // Add output surfaces. This has to be before creating internal buffer queue
+    // in order to get max consumer side buffers.
+    for (auto& it : surfaces) {
+        if (it.second == nullptr) {
+            SP_LOGE("%s: Fatal: surface is NULL", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        res = addOutputLocked(it.first, it.second);
+        if (res != OK) {
+            SP_LOGE("%s: Failed to add output surface: %s(%d)", __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    // Create BufferQueue for input
+    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+
+    // Allocate 1 extra buffer to handle the case where all buffers are detached
+    // from input, and attached to the outputs. In this case, the input queue's
+    // dequeueBuffer can still allocate 1 extra buffer before being blocked by
+    // the output's attachBuffer().
+    mMaxConsumerBuffers++;
+    mBufferItemConsumer = new BufferItemConsumer(mConsumer, consumerUsage, mMaxConsumerBuffers);
+    if (mBufferItemConsumer == nullptr) {
+        return NO_MEMORY;
+    }
+    mConsumer->setConsumerName(toString8(mConsumerName));
+
+    *consumer = new Surface(mProducer);
+    if (*consumer == nullptr) {
+        return NO_MEMORY;
+    }
+
+    res = mProducer->setAsyncMode(true);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to enable input queue async mode: %s(%d)", __FUNCTION__, strerror(-res),
+                res);
+        return res;
+    }
+
+    res = mConsumer->consumerConnect(this, /* controlledByApp */ false);
+
+    mWidth = width;
+    mHeight = height;
+    mFormat = format;
+    mProducerUsage = producerUsage;
+    mAcquiredInputBuffers = 0;
+
+    SP_LOGV("%s: connected", __FUNCTION__);
+    return res;
+}
+
+status_t DeprecatedCamera3StreamSplitter::getOnFrameAvailableResult() {
+    ATRACE_CALL();
+    return mOnFrameAvailableRes.load();
+}
+
+void DeprecatedCamera3StreamSplitter::disconnect() {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mMutex);
+
+    for (auto& notifier : mNotifiers) {
+        sp<IGraphicBufferProducer> producer = notifier.first;
+        sp<OutputListener> listener = notifier.second;
+        IInterface::asBinder(producer)->unlinkToDeath(listener);
+    }
+    mNotifiers.clear();
+
+    for (auto& output : mOutputs) {
+        if (output.second != nullptr) {
+            output.second->disconnect(NATIVE_WINDOW_API_CAMERA);
+        }
+    }
+    mOutputs.clear();
+    mOutputSurfaces.clear();
+    mOutputSlots.clear();
+    mConsumerBufferCount.clear();
+
+    if (mConsumer.get() != nullptr) {
+        mConsumer->consumerDisconnect();
+    }
+
+    if (mBuffers.size() > 0) {
+        SP_LOGW("%zu buffers still being tracked", mBuffers.size());
+        mBuffers.clear();
+    }
+
+    mMaxHalBuffers = 0;
+    mMaxConsumerBuffers = 0;
+    mAcquiredInputBuffers = 0;
+    SP_LOGV("%s: Disconnected", __FUNCTION__);
+}
+
+DeprecatedCamera3StreamSplitter::DeprecatedCamera3StreamSplitter(bool useHalBufManager)
+    : mUseHalBufManager(useHalBufManager) {}
+
+DeprecatedCamera3StreamSplitter::~DeprecatedCamera3StreamSplitter() {
+    disconnect();
+}
+
+status_t DeprecatedCamera3StreamSplitter::addOutput(size_t surfaceId,
+                                                    const sp<Surface>& outputQueue) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mMutex);
+    status_t res = addOutputLocked(surfaceId, outputQueue);
+
+    if (res != OK) {
+        SP_LOGE("%s: addOutputLocked failed %d", __FUNCTION__, res);
+        return res;
+    }
+
+    if (mMaxConsumerBuffers > mAcquiredInputBuffers) {
+        res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
+    }
+
+    return res;
+}
+
+void DeprecatedCamera3StreamSplitter::setHalBufferManager(bool enabled) {
+    Mutex::Autolock lock(mMutex);
+    mUseHalBufManager = enabled;
+}
+
+status_t DeprecatedCamera3StreamSplitter::addOutputLocked(size_t surfaceId,
+                                                          const sp<Surface>& outputQueue) {
+    ATRACE_CALL();
+    if (outputQueue == nullptr) {
+        SP_LOGE("addOutput: outputQueue must not be NULL");
+        return BAD_VALUE;
+    }
+
+    if (mOutputs[surfaceId] != nullptr) {
+        SP_LOGE("%s: surfaceId: %u already taken!", __FUNCTION__, (unsigned)surfaceId);
+        return BAD_VALUE;
+    }
+
+    status_t res = native_window_set_buffers_dimensions(outputQueue.get(), mWidth, mHeight);
+    if (res != NO_ERROR) {
+        SP_LOGE("addOutput: failed to set buffer dimensions (%d)", res);
+        return res;
+    }
+    res = native_window_set_buffers_format(outputQueue.get(), mFormat);
+    if (res != OK) {
+        ALOGE("%s: Unable to configure stream buffer format %#x for surfaceId %zu", __FUNCTION__,
+              mFormat, surfaceId);
+        return res;
+    }
+
+    sp<IGraphicBufferProducer> gbp = outputQueue->getIGraphicBufferProducer();
+    // Connect to the buffer producer
+    sp<OutputListener> listener(new OutputListener(this, gbp));
+    IInterface::asBinder(gbp)->linkToDeath(listener);
+    res = outputQueue->connect(NATIVE_WINDOW_API_CAMERA, listener);
+    if (res != NO_ERROR) {
+        SP_LOGE("addOutput: failed to connect (%d)", res);
+        return res;
+    }
+
+    // Query consumer side buffer count, and update overall buffer count
+    int maxConsumerBuffers = 0;
+    res = static_cast<ANativeWindow*>(outputQueue.get())
+                  ->query(outputQueue.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+                          &maxConsumerBuffers);
+    if (res != OK) {
+        SP_LOGE("%s: Unable to query consumer undequeued buffer count"
+                " for surface",
+                __FUNCTION__);
+        return res;
+    }
+
+    SP_LOGV("%s: Consumer wants %d buffers, Producer wants %zu", __FUNCTION__, maxConsumerBuffers,
+            mMaxHalBuffers);
+    // The output slot count requirement can change depending on the current number
+    // of outputs and the incoming buffer consumption rate. To avoid any issues with
+    // insufficient slots, set their count to the maximum supported. The output
+    // surface buffer allocation is disabled so no real buffers will get allocated.
+    size_t totalBufferCount = BufferQueue::NUM_BUFFER_SLOTS;
+    res = native_window_set_buffer_count(outputQueue.get(), totalBufferCount);
+    if (res != OK) {
+        SP_LOGE("%s: Unable to set buffer count for surface %p", __FUNCTION__, outputQueue.get());
+        return res;
+    }
+
+    // Set dequeueBuffer/attachBuffer timeout if the consumer is not hw composer or hw texture.
+    // We need to skip those cases, as a timeout would disable the non-blocking (async) mode.
+    uint64_t usage = 0;
+    res = native_window_get_consumer_usage(static_cast<ANativeWindow*>(outputQueue.get()), &usage);
+    if (!(usage & (GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_TEXTURE))) {
+        nsecs_t timeout =
+                mUseHalBufManager ? kHalBufMgrDequeueBufferTimeout : kNormalDequeueBufferTimeout;
+        outputQueue->setDequeueTimeout(timeout);
+    }
+
+    res = gbp->allowAllocation(false);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to turn off allocation for outputQueue", __FUNCTION__);
+        return res;
+    }
+
+    // Add new entry into mOutputs
+    mOutputs[surfaceId] = gbp;
+    mOutputSurfaces[surfaceId] = outputQueue;
+    mConsumerBufferCount[surfaceId] = maxConsumerBuffers;
+    if (mConsumerBufferCount[surfaceId] > mMaxHalBuffers) {
+        SP_LOGW("%s: Consumer buffer count %zu larger than max. Hal buffers: %zu", __FUNCTION__,
+                mConsumerBufferCount[surfaceId], mMaxHalBuffers);
+    }
+    mNotifiers[gbp] = listener;
+    mOutputSlots[gbp] = std::make_unique<OutputSlots>(totalBufferCount);
+
+    mMaxConsumerBuffers += maxConsumerBuffers;
+    return NO_ERROR;
+}
+
+status_t DeprecatedCamera3StreamSplitter::removeOutput(size_t surfaceId) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mMutex);
+
+    status_t res = removeOutputLocked(surfaceId);
+    if (res != OK) {
+        SP_LOGE("%s: removeOutputLocked failed %d", __FUNCTION__, res);
+        return res;
+    }
+
+    if (mAcquiredInputBuffers < mMaxConsumerBuffers) {
+        res = mConsumer->setMaxAcquiredBufferCount(mMaxConsumerBuffers);
+        if (res != OK) {
+            SP_LOGE("%s: setMaxAcquiredBufferCount failed %d", __FUNCTION__, res);
+            return res;
+        }
+    }
+
+    return res;
+}
+
+status_t DeprecatedCamera3StreamSplitter::removeOutputLocked(size_t surfaceId) {
+    if (mOutputs[surfaceId] == nullptr) {
+        SP_LOGE("%s: output surface is not present!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    sp<IGraphicBufferProducer> gbp = mOutputs[surfaceId];
+    // Search and decrement the ref. count of any buffers that are
+    // still attached to the removed surface.
+    std::vector<uint64_t> pendingBufferIds;
+    auto& outputSlots = *mOutputSlots[gbp];
+    for (size_t i = 0; i < outputSlots.size(); i++) {
+        if (outputSlots[i] != nullptr) {
+            pendingBufferIds.push_back(outputSlots[i]->getId());
+            auto rc = gbp->detachBuffer(i);
+            if (rc != NO_ERROR) {
+                // Buffers that fail to detach here will be scheduled for detach in the
+                // input buffer queue and the rest of the registered outputs instead.
+                // This helps ensure that the camera stops accessing buffers that
+                // may still be referenced by the disconnected output.
+                mDetachedBuffers.emplace(outputSlots[i]->getId());
+            }
+        }
+    }
+    mOutputs[surfaceId] = nullptr;
+    mOutputSurfaces[surfaceId] = nullptr;
+    mOutputSlots[gbp] = nullptr;
+    for (const auto& id : pendingBufferIds) {
+        decrementBufRefCountLocked(id, surfaceId);
+    }
+
+    auto res = IInterface::asBinder(gbp)->unlinkToDeath(mNotifiers[gbp]);
+    if (res != OK) {
+        SP_LOGE("%s: Failed to unlink producer death listener: %d ", __FUNCTION__, res);
+        return res;
+    }
+
+    res = gbp->disconnect(NATIVE_WINDOW_API_CAMERA);
+    if (res != OK) {
+        SP_LOGE("%s: Unable disconnect from producer interface: %d ", __FUNCTION__, res);
+        return res;
+    }
+
+    mNotifiers[gbp] = nullptr;
+    mMaxConsumerBuffers -= mConsumerBufferCount[surfaceId];
+    mConsumerBufferCount[surfaceId] = 0;
+
+    return res;
+}
+
+status_t DeprecatedCamera3StreamSplitter::outputBufferLocked(
+        const sp<IGraphicBufferProducer>& output, const BufferItem& bufferItem, size_t surfaceId) {
+    ATRACE_CALL();
+    status_t res;
+    IGraphicBufferProducer::QueueBufferInput queueInput(
+            bufferItem.mTimestamp, bufferItem.mIsAutoTimestamp, bufferItem.mDataSpace,
+            bufferItem.mCrop, static_cast<int32_t>(bufferItem.mScalingMode), bufferItem.mTransform,
+            bufferItem.mFence);
+
+    IGraphicBufferProducer::QueueBufferOutput queueOutput;
+
+    uint64_t bufferId = bufferItem.mGraphicBuffer->getId();
+    const BufferTracker& tracker = *(mBuffers[bufferId]);
+    int slot = getSlotForOutputLocked(output, tracker.getBuffer());
+
+    if (mOutputSurfaces[surfaceId] != nullptr) {
+        sp<ANativeWindow> anw = mOutputSurfaces[surfaceId];
+        camera3::Camera3Stream::queueHDRMetadata(
+                bufferItem.mGraphicBuffer->getNativeBuffer()->handle, anw, mDynamicRangeProfile);
+    } else {
+        SP_LOGE("%s: Invalid surface id: %zu!", __FUNCTION__, surfaceId);
+    }
+
+    // In case the output BufferQueue has its own lock, if we hold splitter lock while calling
+    // queueBuffer (which will try to acquire the output lock), the output could be holding its
+    // own lock calling releaseBuffer (which will try to acquire the splitter lock), running into
+    // a circular lock situation.
+    mMutex.unlock();
+    res = output->queueBuffer(slot, queueInput, &queueOutput);
+    mMutex.lock();
+
+    SP_LOGV("%s: Queuing buffer to buffer queue %p slot %d returns %d", __FUNCTION__, output.get(),
+            slot, res);
+    // While the buffer is being queued, 'mMutex' is not held, which makes the removal
+    // of "output" possible. Check whether this is the case and return.
+    if (mOutputSlots[output] == nullptr) {
+        return res;
+    }
+    if (res != OK) {
+        if (res != NO_INIT && res != DEAD_OBJECT) {
+            SP_LOGE("Queuing buffer to output failed (%d)", res);
+        }
+        // If we just discovered that this output has been abandoned, note
+        // that, increment the release count so that we still release this
+        // buffer eventually, and move on to the next output
+        onAbandonedLocked();
+        decrementBufRefCountLocked(bufferItem.mGraphicBuffer->getId(), surfaceId);
+        return res;
+    }
+
+    // If the queued buffer replaces a pending buffer in the async
+    // queue, no onBufferReleased is called by the buffer queue.
+    // Proactively trigger the callback to avoid buffer loss.
+    if (queueOutput.bufferReplaced) {
+        onBufferReplacedLocked(output, surfaceId);
+    }
+
+    return res;
+}
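
Editorial aside, not part of the patch: the unlock/relock around queueBuffer above is the standard way to avoid lock-order inversion when calling into a component that has its own lock and may call back into the splitter. The same shape, reduced to standard C++ (names are illustrative):

#include <functional>
#include <mutex>

std::mutex splitterMutex;

void callOutWithoutHoldingLock(const std::function<void()>& callee) {
    std::unique_lock<std::mutex> lock(splitterMutex);
    // ... mutate state guarded by splitterMutex ...
    lock.unlock();  // drop our lock before calling out
    callee();       // callee may take its own lock and re-enter the splitter
    lock.lock();    // re-acquire before touching guarded state again
    // ... re-check state here: it may have changed while the lock was dropped ...
}
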
+
+std::string DeprecatedCamera3StreamSplitter::getUniqueConsumerName() {
+    static volatile int32_t counter = 0;
+    return fmt::sprintf("DeprecatedCamera3StreamSplitter-%d", android_atomic_inc(&counter));
+}
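
Editorial aside, not part of the patch: getUniqueConsumerName() above just tags a fixed prefix with a monotonically increasing counter. An equivalent sketch with standard C++ atomics in place of android_atomic_inc and fmt::sprintf:

#include <atomic>
#include <string>

static std::string uniqueConsumerName(const std::string& prefix) {
    static std::atomic<int> counter{0};
    return prefix + "-" + std::to_string(counter.fetch_add(1));
}
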
+
+status_t DeprecatedCamera3StreamSplitter::notifyBufferReleased(const sp<GraphicBuffer>& buffer) {
+    ATRACE_CALL();
+
+    Mutex::Autolock lock(mMutex);
+
+    uint64_t bufferId = buffer->getId();
+    std::unique_ptr<BufferTracker> tracker_ptr = std::move(mBuffers[bufferId]);
+    mBuffers.erase(bufferId);
+
+    return OK;
+}
+
+status_t DeprecatedCamera3StreamSplitter::attachBufferToOutputs(
+        ANativeWindowBuffer* anb, const std::vector<size_t>& surface_ids) {
+    ATRACE_CALL();
+    status_t res = OK;
+
+    Mutex::Autolock lock(mMutex);
+
+    sp<GraphicBuffer> gb(static_cast<GraphicBuffer*>(anb));
+    uint64_t bufferId = gb->getId();
+
+    // Initialize buffer tracker for this input buffer
+    auto tracker = std::make_unique<BufferTracker>(gb, surface_ids);
+
+    for (auto& surface_id : surface_ids) {
+        sp<IGraphicBufferProducer>& gbp = mOutputs[surface_id];
+        if (gbp.get() == nullptr) {
+            // Output surface got likely removed by client.
+            continue;
+        }
+        int slot = getSlotForOutputLocked(gbp, gb);
+        if (slot != BufferItem::INVALID_BUFFER_SLOT) {
+            // Buffer is already attached to this output surface.
+            continue;
+        }
+        // Temporarily unlock the mutex when trying to attachBuffer to the output
+        // queue, because attachBuffer could block in case of a slow consumer. If
+        // we block while holding the lock, onFrameAvailable and onBufferReleased
+        // will block as well because they need to acquire the same lock.
+        mMutex.unlock();
+        res = gbp->attachBuffer(&slot, gb);
+        mMutex.lock();
+        if (res != OK) {
+            SP_LOGE("%s: Cannot attachBuffer from GraphicBufferProducer %p: %s (%d)", __FUNCTION__,
+                    gbp.get(), strerror(-res), res);
+            // TODO: might need to detach/cleanup the already attached buffers before return?
+            return res;
+        }
+        if ((slot < 0) || (slot > BufferQueue::NUM_BUFFER_SLOTS)) {
+            SP_LOGE("%s: Slot received %d either bigger than expected maximum %d or negative!",
+                    __FUNCTION__, slot, BufferQueue::NUM_BUFFER_SLOTS);
+            return BAD_VALUE;
+        }
+        // While the buffer is being attached, 'mMutex' is not held, which makes the
+        // removal of "gbp" possible. Check whether this is the case and continue.
+        if (mOutputSlots[gbp] == nullptr) {
+            continue;
+        }
+        auto& outputSlots = *mOutputSlots[gbp];
+        if (static_cast<size_t>(slot + 1) > outputSlots.size()) {
+            outputSlots.resize(slot + 1);
+        }
+        if (outputSlots[slot] != nullptr) {
+            // If the buffer is attached to a slot which already contains a buffer,
+            // the previous buffer will be removed from the output queue. Decrement
+            // the reference count accordingly.
+            decrementBufRefCountLocked(outputSlots[slot]->getId(), surface_id);
+        }
+        SP_LOGV("%s: Attached buffer %p to slot %d on output %p.", __FUNCTION__, gb.get(), slot,
+                gbp.get());
+        outputSlots[slot] = gb;
+    }
+
+    mBuffers[bufferId] = std::move(tracker);
+
+    return res;
+}
+
+void DeprecatedCamera3StreamSplitter::onFrameAvailable(const BufferItem& /*item*/) {
+    ATRACE_CALL();
+    Mutex::Autolock lock(mMutex);
+
+    // Acquire and detach the buffer from the input
+    BufferItem bufferItem;
+    status_t res = mConsumer->acquireBuffer(&bufferItem, /* presentWhen */ 0);
+    if (res != NO_ERROR) {
+        SP_LOGE("%s: Acquiring buffer from input failed (%d)", __FUNCTION__, res);
+        mOnFrameAvailableRes.store(res);
+        return;
+    }
+
+    uint64_t bufferId;
+    if (bufferItem.mGraphicBuffer != nullptr) {
+        mInputSlots[bufferItem.mSlot] = bufferItem;
+    } else if (bufferItem.mAcquireCalled) {
+        bufferItem.mGraphicBuffer = mInputSlots[bufferItem.mSlot].mGraphicBuffer;
+        mInputSlots[bufferItem.mSlot].mFrameNumber = bufferItem.mFrameNumber;
+    } else {
+        SP_LOGE("%s: Invalid input graphic buffer!", __FUNCTION__);
+        mOnFrameAvailableRes.store(BAD_VALUE);
+        return;
+    }
+    bufferId = bufferItem.mGraphicBuffer->getId();
+
+    if (mBuffers.find(bufferId) == mBuffers.end()) {
+        SP_LOGE("%s: Acquired buffer doesn't exist in attached buffer map", __FUNCTION__);
+        mOnFrameAvailableRes.store(INVALID_OPERATION);
+        return;
+    }
+
+    mAcquiredInputBuffers++;
+    SP_LOGV("acquired buffer %" PRId64 " from input at slot %d", bufferItem.mGraphicBuffer->getId(),
+            bufferItem.mSlot);
+
+    if (bufferItem.mTransformToDisplayInverse) {
+        bufferItem.mTransform |= NATIVE_WINDOW_TRANSFORM_INVERSE_DISPLAY;
+    }
+
+    // Attach and queue the buffer to each of the outputs
+    BufferTracker& tracker = *(mBuffers[bufferId]);
+
+    SP_LOGV("%s: BufferTracker for buffer %" PRId64 ", number of requests %zu", __FUNCTION__,
+            bufferItem.mGraphicBuffer->getId(), tracker.requestedSurfaces().size());
+    for (const auto id : tracker.requestedSurfaces()) {
+        if (mOutputs[id] == nullptr) {
+            // Output surface got likely removed by client.
+            continue;
+        }
+
+        res = outputBufferLocked(mOutputs[id], bufferItem, id);
+        if (res != OK) {
+            SP_LOGE("%s: outputBufferLocked failed %d", __FUNCTION__, res);
+            mOnFrameAvailableRes.store(res);
+            // If we fail to send buffer to certain output, keep sending to
+            // other outputs.
+            continue;
+        }
+    }
+
+    mOnFrameAvailableRes.store(res);
+}
+
+void DeprecatedCamera3StreamSplitter::onFrameReplaced(const BufferItem& item) {
+    ATRACE_CALL();
+    onFrameAvailable(item);
+}
+
+void DeprecatedCamera3StreamSplitter::decrementBufRefCountLocked(uint64_t id, size_t surfaceId) {
+    ATRACE_CALL();
+
+    if (mBuffers[id] == nullptr) {
+        return;
+    }
+
+    size_t referenceCount = mBuffers[id]->decrementReferenceCountLocked(surfaceId);
+    if (referenceCount > 0) {
+        return;
+    }
+
+    // We no longer need to track the buffer now that it is being returned to the
+    // input. Note that this should happen before we unlock the mutex and call
+    // releaseBuffer, to avoid the case where the same bufferId is acquired in
+    // attachBufferToOutputs and a new BufferTracker with the same bufferId
+    // overwrites the current one.
+    std::unique_ptr<BufferTracker> tracker_ptr = std::move(mBuffers[id]);
+    mBuffers.erase(id);
+
+    uint64_t bufferId = tracker_ptr->getBuffer()->getId();
+    int consumerSlot = -1;
+    uint64_t frameNumber;
+    auto inputSlot = mInputSlots.begin();
+    for (; inputSlot != mInputSlots.end(); inputSlot++) {
+        if (inputSlot->second.mGraphicBuffer->getId() == bufferId) {
+            consumerSlot = inputSlot->second.mSlot;
+            frameNumber = inputSlot->second.mFrameNumber;
+            break;
+        }
+    }
+    if (consumerSlot == -1) {
+        SP_LOGE("%s: Buffer missing inside input slots!", __FUNCTION__);
+        return;
+    }
+
+    auto detachBuffer = mDetachedBuffers.find(bufferId);
+    bool detach = (detachBuffer != mDetachedBuffers.end());
+    if (detach) {
+        mDetachedBuffers.erase(detachBuffer);
+        mInputSlots.erase(inputSlot);
+    }
+    // Temporarily unlock mutex to avoid circular lock:
+    // 1. This function holds splitter lock, calls releaseBuffer which triggers
+    // onBufferReleased in Camera3OutputStream. onBufferReleased waits on the
+    // OutputStream lock
+    // 2. Camera3SharedOutputStream::getBufferLocked calls
+    // attachBufferToOutputs, which holds the stream lock, and waits for the
+    // splitter lock.
+    sp<IGraphicBufferConsumer> consumer(mConsumer);
+    mMutex.unlock();
+    int res = NO_ERROR;
+    if (consumer != nullptr) {
+        if (detach) {
+            res = consumer->detachBuffer(consumerSlot);
+        } else {
+            res = consumer->releaseBuffer(consumerSlot, frameNumber, EGL_NO_DISPLAY,
+                                          EGL_NO_SYNC_KHR, tracker_ptr->getMergedFence());
+        }
+    } else {
+        SP_LOGE("%s: consumer has become null!", __FUNCTION__);
+    }
+    mMutex.lock();
+
+    if (res != NO_ERROR) {
+        if (detach) {
+            SP_LOGE("%s: detachBuffer returns %d", __FUNCTION__, res);
+        } else {
+            SP_LOGE("%s: releaseBuffer returns %d", __FUNCTION__, res);
+        }
+    } else {
+        if (mAcquiredInputBuffers == 0) {
+            ALOGW("%s: Acquired input buffer count already at zero!", __FUNCTION__);
+        } else {
+            mAcquiredInputBuffers--;
+        }
+    }
+}
+
+void DeprecatedCamera3StreamSplitter::onBufferReleasedByOutput(
+        const sp<IGraphicBufferProducer>& from) {
+    ATRACE_CALL();
+    sp<Fence> fence;
+
+    int slot = BufferItem::INVALID_BUFFER_SLOT;
+    auto res = from->dequeueBuffer(&slot, &fence, mWidth, mHeight, mFormat, mProducerUsage, nullptr,
+                                   nullptr);
+    Mutex::Autolock lock(mMutex);
+    handleOutputDequeueStatusLocked(res, slot);
+    if (res != OK) {
+        return;
+    }
+
+    size_t surfaceId = 0;
+    bool found = false;
+    for (const auto& it : mOutputs) {
+        if (it.second == from) {
+            found = true;
+            surfaceId = it.first;
+            break;
+        }
+    }
+    if (!found) {
+        SP_LOGV("%s: output surface not registered anymore!", __FUNCTION__);
+        return;
+    }
+
+    returnOutputBufferLocked(fence, from, surfaceId, slot);
+}
+
+void DeprecatedCamera3StreamSplitter::onBufferReplacedLocked(const sp<IGraphicBufferProducer>& from,
+                                                             size_t surfaceId) {
+    ATRACE_CALL();
+    sp<Fence> fence;
+
+    int slot = BufferItem::INVALID_BUFFER_SLOT;
+    auto res = from->dequeueBuffer(&slot, &fence, mWidth, mHeight, mFormat, mProducerUsage, nullptr,
+                                   nullptr);
+    handleOutputDequeueStatusLocked(res, slot);
+    if (res != OK) {
+        return;
+    }
+
+    returnOutputBufferLocked(fence, from, surfaceId, slot);
+}
+
+void DeprecatedCamera3StreamSplitter::returnOutputBufferLocked(
+        const sp<Fence>& fence, const sp<IGraphicBufferProducer>& from, size_t surfaceId,
+        int slot) {
+    sp<GraphicBuffer> buffer;
+
+    if (mOutputSlots[from] == nullptr) {
+        // Output surface got likely removed by client.
+        return;
+    }
+
+    auto outputSlots = *mOutputSlots[from];
+    buffer = outputSlots[slot];
+    BufferTracker& tracker = *(mBuffers[buffer->getId()]);
+    // Merge the release fence of the incoming buffer so that the fence we send
+    // back to the input includes all of the outputs' fences
+    if (fence != nullptr && fence->isValid()) {
+        tracker.mergeFence(fence);
+    }
+
+    auto detachBuffer = mDetachedBuffers.find(buffer->getId());
+    bool detach = (detachBuffer != mDetachedBuffers.end());
+    if (detach) {
+        auto res = from->detachBuffer(slot);
+        if (res == NO_ERROR) {
+            outputSlots[slot] = nullptr;
+        } else {
+            SP_LOGE("%s: detach buffer from output failed (%d)", __FUNCTION__, res);
+        }
+    }
+
+    // Check to see if this is the last outstanding reference to this buffer
+    decrementBufRefCountLocked(buffer->getId(), surfaceId);
+}
+
+void DeprecatedCamera3StreamSplitter::handleOutputDequeueStatusLocked(status_t res, int slot) {
+    if (res == NO_INIT) {
+        // If we just discovered that this output has been abandoned, note that,
+        // but we can't do anything else, since buffer is invalid
+        onAbandonedLocked();
+    } else if (res == IGraphicBufferProducer::BUFFER_NEEDS_REALLOCATION) {
+        SP_LOGE("%s: Producer needs to re-allocate buffer!", __FUNCTION__);
+        SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
+    } else if (res == IGraphicBufferProducer::RELEASE_ALL_BUFFERS) {
+        SP_LOGE("%s: All slot->buffer mapping should be released!", __FUNCTION__);
+        SP_LOGE("%s: This should not happen with buffer allocation disabled!", __FUNCTION__);
+    } else if (res == NO_MEMORY) {
+        SP_LOGE("%s: No free buffers", __FUNCTION__);
+    } else if (res == WOULD_BLOCK) {
+        SP_LOGE("%s: Dequeue call will block", __FUNCTION__);
+    } else if (res != OK || (slot == BufferItem::INVALID_BUFFER_SLOT)) {
+        SP_LOGE("%s: dequeue buffer from output failed (%d)", __FUNCTION__, res);
+    }
+}
+
+void DeprecatedCamera3StreamSplitter::onAbandonedLocked() {
+    // If this is called from binderDied callback, it means the app process
+    // holding the binder has died. CameraService will be notified of the binder
+    // death, and camera device will be closed, which in turn calls
+    // disconnect().
+    //
+    // If this is called from onBufferReleasedByOutput or onFrameAvailable, one
+    // consumer being abandoned shouldn't impact the other consumer. So we won't
+    // stop the buffer flow.
+    //
+    // In both cases, we don't need to do anything here.
+    SP_LOGV("One of my outputs has abandoned me");
+}
+
+int DeprecatedCamera3StreamSplitter::getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp,
+                                                            const sp<GraphicBuffer>& gb) {
+    auto& outputSlots = *mOutputSlots[gbp];
+
+    for (size_t i = 0; i < outputSlots.size(); i++) {
+        if (outputSlots[i] == gb) {
+            return (int)i;
+        }
+    }
+
+    SP_LOGV("%s: Cannot find slot for gb %p on output %p", __FUNCTION__, gb.get(), gbp.get());
+    return BufferItem::INVALID_BUFFER_SLOT;
+}
+
+DeprecatedCamera3StreamSplitter::OutputListener::OutputListener(
+        wp<DeprecatedCamera3StreamSplitter> splitter, wp<IGraphicBufferProducer> output)
+    : mSplitter(splitter), mOutput(output) {}
+
+void DeprecatedCamera3StreamSplitter::OutputListener::onBufferReleased() {
+    ATRACE_CALL();
+    sp<DeprecatedCamera3StreamSplitter> splitter = mSplitter.promote();
+    sp<IGraphicBufferProducer> output = mOutput.promote();
+    if (splitter != nullptr && output != nullptr) {
+        splitter->onBufferReleasedByOutput(output);
+    }
+}
+
+void DeprecatedCamera3StreamSplitter::OutputListener::binderDied(const wp<IBinder>& /* who */) {
+    sp<DeprecatedCamera3StreamSplitter> splitter = mSplitter.promote();
+    if (splitter != nullptr) {
+        Mutex::Autolock lock(splitter->mMutex);
+        splitter->onAbandonedLocked();
+    }
+}
+
+DeprecatedCamera3StreamSplitter::BufferTracker::BufferTracker(
+        const sp<GraphicBuffer>& buffer, const std::vector<size_t>& requestedSurfaces)
+    : mBuffer(buffer),
+      mMergedFence(Fence::NO_FENCE),
+      mRequestedSurfaces(requestedSurfaces),
+      mReferenceCount(requestedSurfaces.size()) {}
+
+void DeprecatedCamera3StreamSplitter::BufferTracker::mergeFence(const sp<Fence>& with) {
+    mMergedFence = Fence::merge(String8("DeprecatedCamera3StreamSplitter"), mMergedFence, with);
+}
+
+size_t DeprecatedCamera3StreamSplitter::BufferTracker::decrementReferenceCountLocked(
+        size_t surfaceId) {
+    const auto& it = std::find(mRequestedSurfaces.begin(), mRequestedSurfaces.end(), surfaceId);
+    if (it == mRequestedSurfaces.end()) {
+        return mReferenceCount;
+    } else {
+        mRequestedSurfaces.erase(it);
+    }
+
+    if (mReferenceCount > 0) --mReferenceCount;
+    return mReferenceCount;
+}
+
+}  // namespace android
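The BufferTracker bookkeeping above is the heart of the splitter: a buffer queued to the input is handed to every requested output and returned to the input only when the last of those outputs releases it. The following is a stand-alone sketch of that per-surface reference counting; the SimpleTracker name and the main() driver are illustrative only and not part of this patch.

    // Minimal sketch of BufferTracker-style reference counting; SimpleTracker
    // is a hypothetical name used only for this illustration.
    #include <algorithm>
    #include <cstdio>
    #include <vector>

    class SimpleTracker {
      public:
        explicit SimpleTracker(std::vector<size_t> requestedSurfaces)
            : mRequestedSurfaces(std::move(requestedSurfaces)),
              mReferenceCount(mRequestedSurfaces.size()) {}

        // Release the reference held by surfaceId; returns the remaining count.
        size_t release(size_t surfaceId) {
            auto it = std::find(mRequestedSurfaces.begin(), mRequestedSurfaces.end(), surfaceId);
            if (it == mRequestedSurfaces.end()) {
                return mReferenceCount;  // Unknown surface: no effect.
            }
            mRequestedSurfaces.erase(it);
            if (mReferenceCount > 0) --mReferenceCount;
            return mReferenceCount;
        }

      private:
        std::vector<size_t> mRequestedSurfaces;
        size_t mReferenceCount;
    };

    int main() {
        SimpleTracker tracker({/*surface ids*/ 0, 1, 2});
        tracker.release(1);      // Two outputs still hold the buffer.
        tracker.release(0);      // One output left.
        if (tracker.release(2) == 0) {
            std::printf("last output released; return the buffer to the input queue\n");
        }
        return 0;
    }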
diff --git a/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
new file mode 100644
index 0000000..4610985
--- /dev/null
+++ b/services/camera/libcameraservice/device3/deprecated/DeprecatedCamera3StreamSplitter.h
@@ -0,0 +1,299 @@
+/*
+ * Copyright 2014,2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_STREAMSPLITTER_H
+#define ANDROID_SERVERS_STREAMSPLITTER_H
+
+#include <unordered_set>
+
+#include <camera/CameraMetadata.h>
+
+#include <gui/BufferItemConsumer.h>
+#include <gui/IConsumerListener.h>
+#include <gui/Surface.h>
+
+#include <utils/Condition.h>
+#include <utils/Mutex.h>
+#include <utils/StrongPointer.h>
+#include <utils/Timers.h>
+
+#define SP_LOGV(x, ...) ALOGV("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+#define SP_LOGI(x, ...) ALOGI("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+#define SP_LOGW(x, ...) ALOGW("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+#define SP_LOGE(x, ...) ALOGE("[%s] " x, mConsumerName.c_str(), ##__VA_ARGS__)
+
+namespace android {
+
+class GraphicBuffer;
+class IGraphicBufferConsumer;
+class IGraphicBufferProducer;
+
+// DeprecatedCamera3StreamSplitter is an autonomous class that manages one input BufferQueue
+// and multiple output BufferQueues. By using the buffer attach and detach logic
+// in BufferQueue, it is able to present the illusion of a single split
+// BufferQueue, where each buffer queued to the input is available to be
+// acquired by each of the outputs, and is able to be dequeued by the input
+// again only once all of the outputs have released it.
+class DeprecatedCamera3StreamSplitter : public BnConsumerListener {
+  public:
+    // Constructor
+    DeprecatedCamera3StreamSplitter(bool useHalBufManager = false);
+
+    // Connect to the stream splitter by creating buffer queue and connecting it
+    // with output surfaces.
+    status_t connect(const std::unordered_map<size_t, sp<Surface>>& surfaces,
+                     uint64_t consumerUsage, uint64_t producerUsage, size_t halMaxBuffers,
+                     uint32_t width, uint32_t height, android::PixelFormat format,
+                     sp<Surface>* consumer, int64_t dynamicRangeProfile);
+
+    // addOutput adds an output BufferQueue to the splitter. The splitter
+    // connects to outputQueue as a CPU producer, and any buffers queued
+    // to the input will be queued to each output. If any output is abandoned
+    // by its consumer, the splitter will abandon its input queue (see onAbandoned).
+    //
+    // A return value other than NO_ERROR means that an error has occurred and
+    // outputQueue has not been added to the splitter. BAD_VALUE is returned if
+    // outputQueue is NULL. See IGraphicBufferProducer::connect for explanations
+    // of other error codes.
+    status_t addOutput(size_t surfaceId, const sp<Surface>& outputQueue);
+
+    // removeOutput will remove a BufferQueue that was previously added to
+    // the splitter outputs. Any pending buffers in the BufferQueue will get
+    // reclaimed.
+    status_t removeOutput(size_t surfaceId);
+
+    // Notification that the graphic buffer has been released to the input
+    // BufferQueue. The buffer should be reused by the camera device instead of
+    // being queued to the outputs.
+    status_t notifyBufferReleased(const sp<GraphicBuffer>& buffer);
+
+    // Attach a buffer to the specified outputs. This call reserves a buffer
+    // slot in the output queue.
+    status_t attachBufferToOutputs(ANativeWindowBuffer* anb,
+                                   const std::vector<size_t>& surface_ids);
+
+    // Get the return value of onFrameAvailable, to work around the problem that
+    // onFrameAvailable is void. This function should be called by the producer
+    // right after calling queueBuffer().
+    status_t getOnFrameAvailableResult();
+
+    // Disconnect the buffer queue from output surfaces.
+    void disconnect();
+
+    void setHalBufferManager(bool enabled);
+
+  private:
+    // From IConsumerListener
+    //
+    // During this callback, we store some tracking information, detach the
+    // buffer from the input, and attach it to each of the outputs. This call
+    // can block if there are too many outstanding buffers. If it blocks, it
+    // will resume when onBufferReleasedByOutput releases a buffer back to the
+    // input.
+    void onFrameAvailable(const BufferItem& item) override;
+
+    // From IConsumerListener
+    //
+    // Similar to onFrameAvailable, but the buffer item replaces a buffer that is
+    // already in the buffer queue. This can happen when the buffer queue is in
+    // droppable mode.
+    void onFrameReplaced(const BufferItem& item) override;
+
+    // From IConsumerListener
+    // We don't care about released buffers because we detach each buffer as
+    // soon as we acquire it. See the comment for onBufferReleased below for
+    // some clarifying notes about the name.
+    void onBuffersReleased() override {}
+
+    // From IConsumerListener
+    // We don't care about sideband streams, since we won't be splitting them
+    void onSidebandStreamChanged() override {}
+
+    // This is the implementation of the onBufferReleased callback from
+    // IProducerListener. It gets called from an OutputListener (see below), and
+    // 'from' indicates which producer interface the callback was received from.
+    //
+    // During this callback, we detach the buffer from the output queue that
+    // generated the callback, update our state tracking to see if this is the
+    // last output releasing the buffer, and if so, release it to the input.
+    // If we release the buffer to the input, we allow a blocked
+    // onFrameAvailable call to proceed.
+    void onBufferReleasedByOutput(const sp<IGraphicBufferProducer>& from);
+
+    // Called by outputBufferLocked when a buffer in the async buffer queue got replaced.
+    void onBufferReplacedLocked(const sp<IGraphicBufferProducer>& from, size_t surfaceId);
+
+    // When this is called, the splitter disconnects from (i.e., abandons) its
+    // input queue and signals any waiting onFrameAvailable calls to wake up.
+    // It still processes callbacks from other outputs, but only detaches their
+    // buffers so they can continue operating until they run out of buffers to
+    // acquire. This must be called with mMutex locked.
+    void onAbandonedLocked();
+
+    // Decrement the buffer's reference count. Once the reference count becomes
+    // 0, return the buffer back to the input BufferQueue.
+    void decrementBufRefCountLocked(uint64_t id, size_t surfaceId);
+
+    // Check for and handle any output surface dequeue errors.
+    void handleOutputDequeueStatusLocked(status_t res, int slot);
+
+    // Handles released output surface buffers.
+    void returnOutputBufferLocked(const sp<Fence>& fence, const sp<IGraphicBufferProducer>& from,
+                                  size_t surfaceId, int slot);
+
+    // This is a thin wrapper class that lets us determine which BufferQueue
+    // the IProducerListener::onBufferReleased callback is associated with. We
+    // create one of these per output BufferQueue, and then pass the producer
+    // into onBufferReleasedByOutput above.
+    class OutputListener : public SurfaceListener, public IBinder::DeathRecipient {
+      public:
+        OutputListener(wp<DeprecatedCamera3StreamSplitter> splitter,
+                       wp<IGraphicBufferProducer> output);
+        virtual ~OutputListener() = default;
+
+        // From IProducerListener
+        void onBufferReleased() override;
+        bool needsReleaseNotify() override { return true; };
+        void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /*buffers*/) override {};
+        void onBufferDetached(int /*slot*/) override {}
+
+        // From IBinder::DeathRecipient
+        void binderDied(const wp<IBinder>& who) override;
+
+      private:
+        wp<DeprecatedCamera3StreamSplitter> mSplitter;
+        wp<IGraphicBufferProducer> mOutput;
+    };
+
+    class BufferTracker {
+      public:
+        BufferTracker(const sp<GraphicBuffer>& buffer,
+                      const std::vector<size_t>& requestedSurfaces);
+        ~BufferTracker() = default;
+
+        const sp<GraphicBuffer>& getBuffer() const { return mBuffer; }
+        const sp<Fence>& getMergedFence() const { return mMergedFence; }
+
+        void mergeFence(const sp<Fence>& with);
+
+        // Returns the new value
+        // Only called while mMutex is held
+        size_t decrementReferenceCountLocked(size_t surfaceId);
+
+        const std::vector<size_t> requestedSurfaces() const { return mRequestedSurfaces; }
+
+      private:
+        // Disallow copying
+        BufferTracker(const BufferTracker& other);
+        BufferTracker& operator=(const BufferTracker& other);
+
+        sp<GraphicBuffer> mBuffer;  // One instance that holds this native handle
+        sp<Fence> mMergedFence;
+
+        // Requested surfaces for a particular buffer. When the buffer becomes
+        // available from the input queue, the registered surfaces are used to decide
+        // which outputs the buffer is sent to.
+        std::vector<size_t> mRequestedSurfaces;
+        size_t mReferenceCount;
+    };
+
+    // Must be accessed through RefBase
+    virtual ~DeprecatedCamera3StreamSplitter();
+
+    status_t addOutputLocked(size_t surfaceId, const sp<Surface>& outputQueue);
+
+    status_t removeOutputLocked(size_t surfaceId);
+
+    // Send a buffer to a particular output, and increment the reference count
+    // of the buffer. If this output is abandoned, the buffer's reference count
+    // won't be incremented.
+    status_t outputBufferLocked(const sp<IGraphicBufferProducer>& output,
+                                const BufferItem& bufferItem, size_t surfaceId);
+
+    // Get unique name for the buffer queue consumer
+    std::string getUniqueConsumerName();
+
+    // Helper function to get the BufferQueue slot to which a particular buffer is attached.
+    int getSlotForOutputLocked(const sp<IGraphicBufferProducer>& gbp, const sp<GraphicBuffer>& gb);
+
+    // Sum of max consumer buffers for all outputs
+    size_t mMaxConsumerBuffers = 0;
+    size_t mMaxHalBuffers = 0;
+    uint32_t mWidth = 0;
+    uint32_t mHeight = 0;
+    android::PixelFormat mFormat = android::PIXEL_FORMAT_NONE;
+    uint64_t mProducerUsage = 0;
+    int mDynamicRangeProfile = ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD;
+
+    // The attachBuffer call happens on a different thread depending on mUseHalBufManager,
+    // and therefore has a different timing constraint.
+    static const nsecs_t kNormalDequeueBufferTimeout = s2ns(1);      // 1 sec
+    static const nsecs_t kHalBufMgrDequeueBufferTimeout = ms2ns(1);  // 1 msec
+
+    Mutex mMutex;
+
+    sp<IGraphicBufferProducer> mProducer;
+    sp<IGraphicBufferConsumer> mConsumer;
+    sp<BufferItemConsumer> mBufferItemConsumer;
+    sp<Surface> mSurface;
+
+    // Map graphic buffer ids -> buffer items
+    std::unordered_map<uint64_t, BufferItem> mInputSlots;
+
+    // Map surface ids -> gbp outputs
+    std::unordered_map<int, sp<IGraphicBufferProducer>> mOutputs;
+
+    // Map surface ids -> output surfaces
+    std::unordered_map<int, sp<Surface>> mOutputSurfaces;
+
+    // Map surface ids -> consumer buffer count
+    std::unordered_map<int, size_t> mConsumerBufferCount;
+
+    // Map of GraphicBuffer IDs (GraphicBuffer::getId()) to buffer tracking
+    // objects (which are mostly for counting how many outputs have released the
+    // buffer, but also contain merged release fences).
+    std::unordered_map<uint64_t, std::unique_ptr<BufferTracker>> mBuffers;
+
+    struct GBPHash {
+        std::size_t operator()(const sp<IGraphicBufferProducer>& producer) const {
+            return std::hash<IGraphicBufferProducer*>{}(producer.get());
+        }
+    };
+
+    std::unordered_map<sp<IGraphicBufferProducer>, sp<OutputListener>, GBPHash> mNotifiers;
+
+    typedef std::vector<sp<GraphicBuffer>> OutputSlots;
+    std::unordered_map<sp<IGraphicBufferProducer>, std::unique_ptr<OutputSlots>, GBPHash>
+            mOutputSlots;
+
+    // A set of buffers that could potentially stay in some of the outputs after removal
+    // and therefore should be detached from the input queue.
+    std::unordered_set<uint64_t> mDetachedBuffers;
+
+    // Latest onFrameAvailable return value
+    std::atomic<status_t> mOnFrameAvailableRes{0};
+
+    // Currently acquired input buffers
+    size_t mAcquiredInputBuffers;
+
+    std::string mConsumerName;
+
+    bool mUseHalBufManager;
+};
+
+}  // namespace android
+
+#endif
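A detail worth noting in the header above: the output-keyed maps (mNotifiers, mOutputSlots) use GBPHash to hash an sp<IGraphicBufferProducer> by the raw pointer it wraps, so lookups are by producer identity. The same pattern is sketched below with std::shared_ptr standing in for android::sp; the names are illustrative, and std::shared_ptr already ships a std::hash specialization, so the explicit hasher here only mirrors what GBPHash does.

    // Sketch: key an unordered_map by smart-pointer identity (raw pointer hash).
    #include <cstddef>
    #include <memory>
    #include <string>
    #include <unordered_map>

    struct Producer {};  // Stand-in for IGraphicBufferProducer.

    struct ProducerHash {
        std::size_t operator()(const std::shared_ptr<Producer>& p) const {
            return std::hash<Producer*>{}(p.get());  // Hash the underlying pointer.
        }
    };

    int main() {
        std::unordered_map<std::shared_ptr<Producer>, std::string, ProducerHash> names;
        auto output = std::make_shared<Producer>();
        names[output] = "output-0";
        // operator== on shared_ptr compares pointers, so lookups are by identity.
        return names.count(output) == 1 ? 0 : 1;
    }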
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index f2e618f..6986d3c 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -56,6 +56,7 @@
 #include "device3/hidl/HidlCamera3OfflineSession.h"
 #include "utils/SessionConfigurationUtilsHidl.h"
 #include "utils/TraceHFR.h"
+#include "utils/Utils.h"
 
 #include "../../common/hidl/HidlProviderInfo.h"
 #include "HidlCamera3Device.h"
@@ -166,7 +167,7 @@
     }
 
     res = manager->getCameraCharacteristics(mId, mOverrideForPerfClass, &mDeviceInfo,
-            /*overrideToPortrait*/false);
+            hardware::ICameraService::ROTATION_OVERRIDE_NONE);
     if (res != OK) {
         SET_ERR_L("Could not retrieve camera characteristics: %s (%d)", strerror(-res), res);
         session->close();
@@ -181,7 +182,7 @@
             // Do not override characteristics for physical cameras
             res = manager->getCameraCharacteristics(
                     physicalId, /*overrideForPerfClass*/false, &mPhysicalDeviceInfoMap[physicalId],
-                    /*overrideToPortrait*/false);
+                    hardware::ICameraService::ROTATION_OVERRIDE_NONE);
             if (res != OK) {
                 SET_ERR_L("Could not retrieve camera %s characteristics: %s (%d)",
                         physicalId.c_str(), strerror(-res), res);
@@ -302,7 +303,7 @@
     }
     mNeedFixupMonochromeTags = (isMonochrome && deviceVersion < CAMERA_DEVICE_API_VERSION_3_5);
 
-    return initializeCommonLocked();
+    return initializeCommonLocked(manager);
 }
 
 hardware::Return<void> HidlCamera3Device::requestStreamBuffers(
@@ -370,7 +371,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
         mActivePhysicalId}, mResultMetadataQueue
     };
 
@@ -433,7 +434,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
         mActivePhysicalId}, mResultMetadataQueue
     };
 
@@ -481,7 +482,7 @@
         mNumPartialResults, mVendorTagId, mDeviceInfo, mPhysicalDeviceInfoMap,
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
-        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mOverrideToPortrait,
+        *mInterface, mLegacyClient, mMinExpectedDuration, mIsFixedFps, mRotationOverride,
         mActivePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
@@ -717,10 +718,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) {
         return new HidlRequestThread(parent, statusTracker, interface, sessionParamKeys,
-                useHalBufManager, supportCameraMute, overrideToPortrait,
+                useHalBufManager, supportCameraMute, rotationOverride,
                 supportSettingsOverride);
 };
 
@@ -881,7 +882,7 @@
                     ret = true;
                     break;
                 default:
-                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, callStatus);
+                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, eToI(callStatus));
                     ret = true;
             }
         } else {
@@ -928,7 +929,7 @@
         switch (src->stream_type) {
             case CAMERA_STREAM_OUTPUT:
                 streamType = StreamType::OUTPUT;
-                if (flags::session_hal_buf_manager() && mUseHalBufManager) {
+                if (mUseHalBufManager) {
                     mHalBufManagedStreamIds.insert(streamId);
                 }
                 break;
@@ -1721,10 +1722,10 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) :
           RequestThread(parent, statusTracker, interface, sessionParamKeys, useHalBufManager,
-                  supportCameraMute, overrideToPortrait, supportSettingsOverride) {}
+                  supportCameraMute, rotationOverride, supportSettingsOverride) {}
 
 status_t HidlCamera3Device::HidlRequestThread::switchToOffline(
         const std::vector<int32_t>& streamsToKeep,
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
index f11db5d..bcc4d80 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.h
@@ -34,10 +34,10 @@
     explicit HidlCamera3Device(
         std::shared_ptr<CameraServiceProxyWrapper>& cameraServiceProxyWrapper,
         std::shared_ptr<AttributionAndPermissionUtils> attributionAndPermissionUtils,
-        const std::string& id, bool overrideForPerfClass, bool overrideToPortrait,
+        const std::string& id, bool overrideForPerfClass, int rotationOverride,
         bool legacyClient = false) :
         Camera3Device(cameraServiceProxyWrapper, attributionAndPermissionUtils, id,
-                overrideForPerfClass, overrideToPortrait, legacyClient) { }
+                overrideForPerfClass, rotationOverride, legacyClient) { }
 
     virtual ~HidlCamera3Device() {}
 
@@ -179,7 +179,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride);
 
         status_t switchToOffline(
@@ -232,7 +232,7 @@
                 const Vector<int32_t>& sessionParamKeys,
                 bool useHalBufManager,
                 bool supportCameraMute,
-                bool overrideToPortrait,
+                int rotationOverride,
                 bool supportSettingsOverride) override;
 
     virtual sp<Camera3DeviceInjectionMethods>
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
index aa4b762..c26583e 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3OfflineSession.cpp
@@ -22,6 +22,7 @@
 
 #include <utils/Trace.h>
 
+#include <android/hardware/ICameraService.h>
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <camera/StringUtils.h>
 
@@ -108,7 +109,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
         mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -150,7 +151,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
         mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
 
     std::lock_guard<std::mutex> lock(mProcessCaptureResultLock);
@@ -187,7 +188,7 @@
         mDistortionMappers, mZoomRatioMappers, mRotateAndCropMappers,
         mTagMonitor, mInputStream, mOutputStreams, mSessionStatsBuilder, listener, *this, *this,
         mBufferRecords, /*legacyClient*/ false, mMinExpectedDuration, mIsFixedFps,
-        /*overrideToPortrait*/false, activePhysicalId}, mResultMetadataQueue
+        hardware::ICameraService::ROTATION_OVERRIDE_NONE, activePhysicalId}, mResultMetadataQueue
     };
     for (const auto& msg : msgs) {
         camera3::notify(states, msg);
diff --git a/services/camera/libcameraservice/fuzzer/Android.bp b/services/camera/libcameraservice/fuzzer/Android.bp
index 7760f6a..667ba02 100644
--- a/services/camera/libcameraservice/fuzzer/Android.bp
+++ b/services/camera/libcameraservice/fuzzer/Android.bp
@@ -26,7 +26,18 @@
 cc_defaults {
     name: "libcameraservice_fuzz_defaults",
     fuzz_config: {
-        componentid: 41727
+        cc: [
+            "android-camera-fwk-eng@google.com",
+        ],
+        componentid: 41727,
+        hotlists: [
+            "4593311",
+        ],
+        description: "The fuzzer targets the APIs of libcameraservice",
+        vector: "local_no_privileges_required",
+        service_privilege: "privileged",
+        users: "multi_user",
+        fuzzed_code_usage: "shipped",
     },
 }
 
@@ -37,9 +48,9 @@
         "DistortionMapperFuzzer.cpp",
     ],
     shared_libs: [
-        "libcameraservice",
+        "camera_platform_flags_c_lib",
         "libcamera_client",
-        "camera_platform_flags_c_lib"
+        "libcameraservice",
     ],
 }
 
@@ -50,8 +61,8 @@
         "DepthProcessorFuzzer.cpp",
     ],
     shared_libs: [
+        "camera_platform_flags_c_lib",
         "libcameraservice",
-        "camera_platform_flags_c_lib"
     ],
     corpus: ["corpus/*.jpg"],
 }
diff --git a/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
index 650ca91..5c5e177 100644
--- a/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
+++ b/services/camera/libcameraservice/fuzzer/DepthProcessorFuzzer.cpp
@@ -14,49 +14,92 @@
  * limitations under the License.
  */
 
-#include <array>
-#include <vector>
+#include "common/DepthPhotoProcessor.h"
+
+#include <random>
 
 #include <fuzzer/FuzzedDataProvider.h>
 
-#include "common/DepthPhotoProcessor.h"
-
 using namespace android;
 using namespace android::camera3;
 
-static const size_t kTestBufferWidth = 640;
-static const size_t kTestBufferHeight = 480;
-static const size_t kTestBufferDepthSize (kTestBufferWidth * kTestBufferHeight);
+static const float kMinRatio = 0.1f;
+static const float kMaxRatio = 0.9f;
 
-void generateDepth16Buffer(const uint8_t* data, size_t size, std::array<uint16_t, kTestBufferDepthSize> *depth16Buffer /*out*/) {
-    FuzzedDataProvider dataProvider(data, size);
-    for (size_t i = 0; i < depth16Buffer->size(); i++) {
-        (*depth16Buffer)[i] = dataProvider.ConsumeIntegral<uint16_t>();
+static const uint8_t kTotalDepthJpegBufferCount = 3;
+static const uint8_t kIntrinsicCalibrationSize = 5;
+static const uint8_t kLensDistortionSize = 5;
+
+static const DepthPhotoOrientation kDepthPhotoOrientations[] = {
+        DepthPhotoOrientation::DEPTH_ORIENTATION_0_DEGREES,
+        DepthPhotoOrientation::DEPTH_ORIENTATION_90_DEGREES,
+        DepthPhotoOrientation::DEPTH_ORIENTATION_180_DEGREES,
+        DepthPhotoOrientation::DEPTH_ORIENTATION_270_DEGREES};
+
+void generateDepth16Buffer(std::vector<uint16_t>* depth16Buffer /*out*/, size_t length,
+                           FuzzedDataProvider& fdp) {
+    std::default_random_engine gen(fdp.ConsumeIntegral<uint8_t>());
+    std::uniform_int_distribution uniDist(0, UINT16_MAX - 1);
+    for (size_t i = 0; i < length; ++i) {
+        (*depth16Buffer)[i] = uniDist(gen);
     }
 }
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    FuzzedDataProvider fdp(data, size);
+
     DepthPhotoInputFrame inputFrame;
+
+    /**
+     * Consume 80% of the data to set mMainJpegBuffer. This ensures that we don't
+     * completely exhaust the data, leaving the remaining 20% for fuzzing the APIs.
+     */
+    std::vector<uint8_t> buffer = fdp.ConsumeBytes<uint8_t>((size * 80) / 100);
+    inputFrame.mMainJpegBuffer = reinterpret_cast<const char*>(buffer.data());
+
+    /**
+     * Calculate height and width based on buffer size and a ratio within [0.1, 0.9].
+     * The ratio skews the aspect ratio while keeping width * height close to the buffer size.
+     */
+    const float ratio = fdp.ConsumeFloatingPointInRange<float>(kMinRatio, kMaxRatio);
+    const size_t height = std::sqrt(buffer.size()) * ratio;
+    const size_t width = std::sqrt(buffer.size()) / ratio;
+
+    inputFrame.mMainJpegHeight = height;
+    inputFrame.mMainJpegWidth = width;
+    inputFrame.mMainJpegSize = buffer.size();
     // Worst case both depth and confidence maps have the same size as the main color image.
-    inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
+    inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * kTotalDepthJpegBufferCount;
+
+    std::vector<uint16_t> depth16Buffer(height * width);
+    generateDepth16Buffer(&depth16Buffer, height * width, fdp);
+    inputFrame.mDepthMapBuffer = depth16Buffer.data();
+    inputFrame.mDepthMapHeight = height;
+    inputFrame.mDepthMapWidth = inputFrame.mDepthMapStride = width;
+
+    inputFrame.mIsLogical = fdp.ConsumeBool();
+
+    inputFrame.mOrientation = fdp.PickValueInArray<DepthPhotoOrientation>(kDepthPhotoOrientations);
+
+    if (fdp.ConsumeBool()) {
+        for (uint8_t i = 0; i < kIntrinsicCalibrationSize; ++i) {
+            inputFrame.mIntrinsicCalibration[i] = fdp.ConsumeFloatingPoint<float>();
+        }
+        inputFrame.mIsIntrinsicCalibrationValid = 1;
+    }
+
+    if (fdp.ConsumeBool()) {
+        for (uint8_t i = 0; i < kLensDistortionSize; ++i) {
+            inputFrame.mLensDistortion[i] = fdp.ConsumeFloatingPoint<float>();
+        }
+        inputFrame.mIsLensDistortionValid = 1;
+    }
 
     std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
     size_t actualDepthPhotoSize = 0;
 
-    std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
-    generateDepth16Buffer(data, size, &depth16Buffer);
+    processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+                           &actualDepthPhotoSize);
 
-    inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (data);
-    inputFrame.mMainJpegSize = size;
-    inputFrame.mDepthMapBuffer = depth16Buffer.data();
-    inputFrame.mDepthMapStride = kTestBufferWidth;
-    inputFrame.mDepthMapWidth = kTestBufferWidth;
-    inputFrame.mDepthMapHeight = kTestBufferHeight;
-    processDepthPhotoFrame(
-        inputFrame,
-        depthPhotoBuffer.size(),
-        depthPhotoBuffer.data(),
-        &actualDepthPhotoSize);
-
-  return 0;
+    return 0;
 }
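The rewritten DepthProcessorFuzzer above budgets its input: most of the bytes become the main JPEG payload, the remainder drives structured parameters, and width/height are derived so their product tracks the payload size (sqrt(N) * r times sqrt(N) / r is roughly N). Below is a generic sketch of that budgeting pattern with FuzzedDataProvider; the target being fuzzed is omitted, and only the data-splitting idea is shown.

    // Sketch: split fuzz input into a large payload plus structured parameters.
    #include <cmath>
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    #include <fuzzer/FuzzedDataProvider.h>

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        FuzzedDataProvider fdp(data, size);

        // Roughly 80% of the input becomes the payload buffer.
        std::vector<uint8_t> payload = fdp.ConsumeBytes<uint8_t>((size * 80) / 100);

        // Derive dimensions so that width * height stays close to payload.size().
        const float ratio = fdp.ConsumeFloatingPointInRange<float>(0.1f, 0.9f);
        const size_t height = static_cast<size_t>(std::sqrt(payload.size()) * ratio);
        const size_t width = static_cast<size_t>(std::sqrt(payload.size()) / ratio);

        // The remaining ~20% of the input drives API choices.
        const bool useOptionalPath = fdp.ConsumeBool();

        (void)height; (void)width; (void)useOptionalPath;  // Feed these to the target here.
        return 0;
    }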
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index 5dbfb36..c968e44 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -18,12 +18,14 @@
 #define LOG_TAG "RingBufferConsumer"
 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
 
+#include <com_android_graphics_libgui_flags.h>
 #include <inttypes.h>
 
 #include <utils/Log.h>
 
-#include <gui/RingBufferConsumer.h>
 #include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/RingBufferConsumer.h>
 
 #define BI_LOGV(x, ...) ALOGV("[%s] " x, mName.c_str(), ##__VA_ARGS__)
 #define BI_LOGD(x, ...) ALOGD("[%s] " x, mName.c_str(), ##__VA_ARGS__)
@@ -38,13 +40,14 @@
 
 namespace android {
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+RingBufferConsumer::RingBufferConsumer(uint64_t consumerUsage, int bufferCount)
+    : ConsumerBase(), mBufferCount(bufferCount), mLatestTimestamp(0) {
+#else
 RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
-        uint64_t consumerUsage,
-        int bufferCount) :
-    ConsumerBase(consumer),
-    mBufferCount(bufferCount),
-    mLatestTimestamp(0)
-{
+                                       uint64_t consumerUsage, int bufferCount)
+    : ConsumerBase(consumer), mBufferCount(bufferCount), mLatestTimestamp(0) {
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mConsumer->setConsumerUsageBits(consumerUsage);
     mConsumer->setMaxAcquiredBufferCount(bufferCount);
 
@@ -317,7 +320,9 @@
 
         mLatestTimestamp = item.mTimestamp;
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_RING_BUFFER)
         item.mGraphicBuffer = mSlots[item.mSlot].mGraphicBuffer;
+#endif
     } // end of mMutex lock
 
     ConsumerBase::onFrameAvailable(item);
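The RingBufferConsumer changes above illustrate how a libgui behavior change is staged behind a build-time flag macro: the same class compiles with either the legacy constructor (taking an IGraphicBufferConsumer) or the new one, depending on whether the flag is enabled. A generic sketch of that compile-time selection follows; the MY_LIBGUI_FLAG macro and Consumer class are hypothetical, not the real flag machinery.

    // Sketch: compile-time selection between an old and a new constructor shape.
    #include <cstdio>

    #define MY_LIBGUI_FLAG(name) MY_LIBGUI_FLAG_##name
    #define MY_LIBGUI_FLAG_OWNS_QUEUE 1  // Set to 0 to build the legacy path.

    class Consumer {
      public:
    #if MY_LIBGUI_FLAG(OWNS_QUEUE)
        explicit Consumer(int bufferCount) : mBufferCount(bufferCount) {}  // New: owns its queue.
    #else
        Consumer(void* externalQueue, int bufferCount)                     // Legacy: caller supplies it.
            : mExternalQueue(externalQueue), mBufferCount(bufferCount) {}
        void* mExternalQueue = nullptr;
    #endif
        int bufferCount() const { return mBufferCount; }

      private:
        int mBufferCount;
    };

    int main() {
    #if MY_LIBGUI_FLAG(OWNS_QUEUE)
        Consumer consumer(/*bufferCount=*/3);
    #else
        Consumer consumer(/*externalQueue=*/nullptr, /*bufferCount=*/3);
    #endif
        std::printf("bufferCount=%d\n", consumer.bufferCount());
        return 0;
    }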
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index 2e523d1..9fdc996 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -17,9 +17,10 @@
 #ifndef ANDROID_GUI_RINGBUFFERCONSUMER_H
 #define ANDROID_GUI_RINGBUFFERCONSUMER_H
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
-#include <gui/ConsumerBase.h>
 #include <gui/BufferQueue.h>
+#include <gui/ConsumerBase.h>
 
 #include <utils/List.h>
 
@@ -58,8 +59,12 @@
     // the consumer usage flags passed to the graphics allocator. The
     // bufferCount parameter specifies how many buffers can be pinned for user
     // access at the same time.
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    RingBufferConsumer(uint64_t consumerUsage, int bufferCount);
+#else
     RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint64_t consumerUsage,
             int bufferCount);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     virtual ~RingBufferConsumer();
 
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 8f25ad6..59e892f 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -26,6 +26,7 @@
 #include <hidl/HidlTransportSupport.h>
 
 #include <camera/CameraUtils.h>
+#include <utils/AttributionAndPermissionUtils.h>
 #include <utils/Utils.h>
 
 namespace android {
@@ -37,6 +38,7 @@
 
 using frameworks::cameraservice::service::V2_0::implementation::HidlCameraService;
 using hardware::hidl_vec;
+using hardware::BnCameraService::ROTATION_OVERRIDE_NONE;
 using hardware::cameraservice::utils::conversion::convertToHidl;
 using hardware::cameraservice::utils::conversion::B2HStatus;
 using hardware::Void;
@@ -67,10 +69,15 @@
                                             getCameraCharacteristics_cb _hidl_cb) {
     android::CameraMetadata cameraMetadata;
     HStatus status = HStatus::NO_ERROR;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
     binder::Status serviceRet =
         mAidlICameraService->getCameraCharacteristics(cameraId,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, 0, &cameraMetadata);
+                /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
+                clientAttribution, 0, &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -118,11 +125,17 @@
         return Void();
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
     binder::Status serviceRet = mAidlICameraService->connectDevice(
-            callbacks, cameraId, std::string(), {},
-            hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-            kDefaultDeviceId, /*devicePolicy*/0, /*out*/&deviceRemote);
+            callbacks, cameraId, 0/*oomScoreOffset*/,
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
+            clientAttribution, /*devicePolicy*/0, /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index c710671..3858410 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -32,9 +32,16 @@
 cc_defaults {
     name: "camera_service_fuzzer_defaults",
     header_libs: [
+        "libaudioflinger_headers",
+        "libaudiohal_headers",
+        "libaudioutils_headers",
+        "libbinder_headers",
         "libmedia_headers",
+        "libmediautils_headers",
+        "mediautils_headers",
     ],
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "libbinder",
         "libbase",
         "libutils",
@@ -58,7 +65,22 @@
         "android.hardware.camera.device@3.6",
         "android.hardware.camera.device@3.7",
         "camera_platform_flags_c_lib",
+        "libactivitymanager_aidl",
+        "libaudioclient",
+        "libaudioflinger",
+        "libaudiohal",
+        "libaudioprocessing",
+        "libmediaplayerservice",
+        "libmediautils",
+        "libnbaio",
+        "libnblog",
+        "libpermission",
+        "libpowermanager",
+        "libsensorprivacy",
+        "libvibrator",
+        "packagemanager_aidl-cpp",
     ],
+    static_libs: ["libbinder_random_parcel"],
     fuzz_config: {
         cc: [
             "android-camera-fwk-eng@google.com",
@@ -84,8 +106,11 @@
     srcs: [
         "camera_service_fuzzer.cpp",
     ],
+    static_libs: [
+        "libfakeservicemanager",
+    ],
     defaults: [
-        "camera_service_fuzzer_defaults"
+        "camera_service_fuzzer_defaults",
     ],
 }
 
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index 778b428..12ac33f 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -21,18 +21,29 @@
 #define LOG_TAG "CameraServiceFuzzer"
 //#define LOG_NDEBUG 0
 
+#include <AudioFlinger.h>
 #include <CameraService.h>
-#include <device3/Camera3StreamInterface.h>
+#include <ISchedulingPolicyService.h>
+#include <MediaPlayerService.h>
+#include <android-base/logging.h>
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
-#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
+#include <binder/IActivityManager.h>
+#include <binder/IAppOpsService.h>
 #include <camera/CameraUtils.h>
 #include <camera/camera2/OutputConfiguration.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <device3/Camera3StreamInterface.h>
+#include <fakeservicemanager/FakeServiceManager.h>
+#include <fuzzbinder/random_binder.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
+#include <media/IAudioFlinger.h>
 #include <private/android_filesystem_config.h>
 #include "fuzzer/FuzzedDataProvider.h"
 
@@ -40,6 +51,9 @@
 using namespace hardware;
 using namespace std;
 
+using ICameraService::ROTATION_OVERRIDE_NONE;
+using ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT;
+
 const int32_t kPreviewThreshold = 8;
 const int32_t kNumRequestsTested = 8;
 const nsecs_t kPreviewTimeout = 5000000000;  // 5 [s.]
@@ -95,6 +109,196 @@
 const size_t kNumCameraMsg = size(kCameraMsg);
 const size_t kNumSoundKind = size(kSoundKind);
 const size_t kNumShellCmd = size(kShellCmd);
+static std::once_flag gSmOnce;
+sp<CameraService> gCameraService;
+
+void addService(const String16& serviceName, const sp<FakeServiceManager>& fakeServiceManager,
+                FuzzedDataProvider* fdp) {
+    sp<IBinder> binder = getRandomBinder(fdp);
+    if (!binder) {
+        return;
+    }
+
+    CHECK_EQ(NO_ERROR, fakeServiceManager->addService(serviceName, binder));
+    return;
+}
+
+class FuzzerActivityManager : public BnInterface<IActivityManager> {
+  public:
+    int32_t openContentUri(const String16& /*stringUri*/) override { return 0; }
+
+    status_t registerUidObserver(const sp<IUidObserver>& /*observer*/, const int32_t /*event*/,
+                                 const int32_t /*cutpoint*/,
+                                 const String16& /*callingPackage*/) override {
+        return OK;
+    }
+
+    status_t unregisterUidObserver(const sp<IUidObserver>& /*observer*/) override { return OK; }
+
+    status_t registerUidObserverForUids(const sp<IUidObserver>& /*observer*/,
+                                        const int32_t /*event*/, const int32_t /*cutpoint*/,
+                                        const String16& /*callingPackage*/,
+                                        const int32_t* /*uids[]*/, size_t /*nUids*/,
+                                        /*out*/ sp<IBinder>& /*observerToken*/) override {
+        return OK;
+    }
+
+    status_t addUidToObserver(const sp<IBinder>& /*observerToken*/,
+                              const String16& /*callingPackage*/, int32_t /*uid*/) override {
+        return OK;
+    }
+
+    status_t removeUidFromObserver(const sp<IBinder>& /*observerToken*/,
+                                   const String16& /*callingPackage*/, int32_t /*uid*/) override {
+        return OK;
+    }
+
+    bool isUidActive(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return true;
+    }
+
+    int32_t getUidProcessState(const uid_t /*uid*/, const String16& /*callingPackage*/) override {
+        return ActivityManager::PROCESS_STATE_UNKNOWN;
+    }
+
+    status_t checkPermission(const String16& /*permission*/, const pid_t /*pid*/,
+                             const uid_t /*uid*/, int32_t* /*outResult*/) override {
+        return NO_ERROR;
+    }
+
+    status_t logFgsApiBegin(int32_t /*apiType*/, int32_t /*appUid*/, int32_t /*appPid*/) override {
+        return OK;
+    }
+    status_t logFgsApiEnd(int32_t /*apiType*/, int32_t /*appUid*/, int32_t /*appPid*/) override {
+        return OK;
+    }
+    status_t logFgsApiStateChanged(int32_t /*apiType*/, int32_t /*state*/, int32_t /*appUid*/,
+                                   int32_t /*appPid*/) override {
+        return OK;
+    }
+};
+
+class FuzzerSensorPrivacyManager : public BnInterface<hardware::ISensorPrivacyManager> {
+  public:
+    binder::Status supportsSensorToggle(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                        bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status addSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return binder::Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    binder::Status addToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status removeSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return binder::Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    binder::Status removeToggleSensorPrivacyListener(
+            const sp<hardware::ISensorPrivacyListener>& /*listener*/) override {
+        return binder::Status::fromStatusT(::android::UNKNOWN_TRANSACTION);
+    }
+    binder::Status isSensorPrivacyEnabled(bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status isCombinedToggleSensorPrivacyEnabled(int32_t /*sensor*/,
+                                                        bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status isToggleSensorPrivacyEnabled(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                                bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setSensorPrivacy(bool /*enable*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setToggleSensorPrivacy(int32_t /*userId*/, int32_t /*source*/,
+                                          int32_t /*sensor*/, bool /*enable*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setToggleSensorPrivacyForProfileGroup(int32_t /*userId*/, int32_t /*source*/,
+                                                         int32_t /*sensor*/,
+                                                         bool /*enable*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status getCameraPrivacyAllowlist(
+            ::std::vector<::android::String16>* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status getToggleSensorPrivacyState(int32_t /*toggleType*/, int32_t /*sensor*/,
+                                               int32_t* /* _aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setToggleSensorPrivacyState(int32_t /*userId*/, int32_t /*source*/,
+                                               int32_t /*sensor*/, int32_t /*state*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status setToggleSensorPrivacyStateForProfileGroup(int32_t /*userId*/,
+                                                              int32_t /*source*/,
+                                                              int32_t /*sensor*/,
+                                                              int32_t /*state*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+    binder::Status isCameraPrivacyEnabled(const ::android::String16& /*packageName*/,
+                                          bool* /*_aidl_return*/) override {
+        return binder::Status::fromStatusT(UNKNOWN_TRANSACTION);
+    }
+};
+
+class FuzzAppOpsService : public BnAppOpsService {
+  public:
+    int32_t checkOperation(int32_t /*code*/, int32_t /*uid*/,
+                           const String16& /*packageName*/) override {
+        return 0;
+    }
+
+    int32_t noteOperation(int32_t /*code*/, int32_t /*uid*/, const String16& /*packageName*/,
+                          const std::optional<String16>& /*attributionTag*/,
+                          bool /*shouldCollectAsyncNotedOp*/, const String16& /*message*/,
+                          bool /*shouldCollectMessage*/) override {
+        return 0;
+    }
+
+    void startWatchingModeWithFlags(int32_t /*op*/, const String16& /*packageName*/,
+                                    int32_t /*flags*/,
+                                    const sp<IAppOpsCallback>& /*callback*/) override {
+        return;
+    }
+
+    int32_t startOperation(const sp<IBinder>& /*token*/, int32_t /*code*/, int32_t /*uid*/,
+                           const String16& /*packageName*/,
+                           const std::optional<String16>& /*attributionTag*/,
+                           bool /*startIfModeDefault*/, bool /*shouldCollectAsyncNotedOp*/,
+                           const String16& /*message*/, bool /*shouldCollectMessage*/) override {
+        return 0;
+    }
+
+    void finishOperation(const sp<IBinder>& /*token*/, int32_t /*code*/, int32_t /*uid*/,
+                         const String16& /*packageName*/,
+                         const std::optional<String16>& /*attributionTag*/) override {
+        return;
+    }
+
+    void startWatchingMode(int32_t /*op*/, const String16& /*packageName*/,
+                           const sp<IAppOpsCallback>& /*callback*/) override {
+        return;
+    }
+
+    void stopWatchingMode(const sp<IAppOpsCallback>& /*callback*/) override { return; }
+
+    int32_t permissionToOpCode(const String16& /*permission*/) override { return 0; }
+
+    int32_t checkAudioOperation(int32_t /*code*/, int32_t /*usage*/, int32_t /*uid*/,
+                                const String16& /*packageName*/) override {
+        return 0;
+    }
+
+    void setCameraAudioRestriction(int32_t /*mode*/) override { return; }
+
+    bool shouldCollectNotes(int32_t /*opCode*/) override { return true; }
+};
 
 class CameraFuzzer : public ::android::hardware::BnCameraClient {
    public:
@@ -216,7 +420,9 @@
     } else {
         camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
     }
-    mCameraService->getNumberOfCameras(camType, kDefaultDeviceId, /*devicePolicy*/0, &mNumCameras);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    mCameraService->getNumberOfCameras(camType, clientAttribution, /*devicePolicy*/0, &mNumCameras);
 }
 
 void CameraFuzzer::getCameraInformation(int32_t cameraId) {
@@ -235,14 +441,17 @@
     hardware::camera2::params::VendorTagDescriptorCache cache;
     mCameraService->getCameraVendorTagCache(&cache);
 
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+
     CameraInfo cameraInfo;
-    mCameraService->getCameraInfo(cameraId, /*overrideToPortrait*/false, kDefaultDeviceId,
+    mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, clientAttribution,
             /*devicePolicy*/0, &cameraInfo);
 
     CameraMetadata metadata;
     mCameraService->getCameraCharacteristics(cameraIdStr,
-            /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-            kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+            /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
+            clientAttribution, /*devicePolicy*/0, &metadata);
 }
 
 void CameraFuzzer::invokeCameraSound() {
@@ -324,13 +533,15 @@
     std::string cameraIdStr = std::to_string(cameraId);
     sp<IBinder> binder = new BBinder;
 
-    mCameraService->setTorchMode(cameraIdStr, true, binder, kDefaultDeviceId, /*devicePolicy*/0);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    mCameraService->setTorchMode(cameraIdStr, true, binder, clientAttribution, /*devicePolicy*/0);
     ALOGV("Turned torch on.");
     int32_t torchStrength = rand() % 5 + 1;
     ALOGV("Changing torch strength level to %d", torchStrength);
     mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder,
-            kDefaultDeviceId, /*devicePolicy*/0);
-    mCameraService->setTorchMode(cameraIdStr, false, binder, kDefaultDeviceId, /*devicePolicy*/0);
+            clientAttribution, /*devicePolicy*/0);
+    mCameraService->setTorchMode(cameraIdStr, false, binder, clientAttribution, /*devicePolicy*/0);
     ALOGV("Turned torch off.");
 }
 
@@ -346,12 +557,15 @@
     ::android::binder::Status rc;
     sp<ICamera> cameraDevice;
 
-    rc = mCameraService->connect(this, cameraId, std::string(),
-                                 android::CameraService::USE_CALLING_UID,
-                                 android::CameraService::USE_CALLING_PID,
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+    clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+    rc = mCameraService->connect(this, cameraId,
                                  /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
-                                 /*overrideToPortrait*/true, /*forceSlowJpegMode*/false,
-                                 kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
+                                 ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+                                 /*forceSlowJpegMode*/false,
+                                 clientAttribution, /*devicePolicy*/0, &cameraDevice);
     if (!rc.isOk()) {
         // camera not connected
         return;
@@ -586,14 +800,37 @@
     for (auto s : statuses) {
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        mCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/true,
-                kDefaultDeviceId, /*devicePolicy*/0, &device);
+
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
+        clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+        clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+        mCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
+                clientAttribution, /*devicePolicy*/0, &device);
         if (device == nullptr) {
             continue;
         }
         device->beginConfigure();
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/ 8, /*controlledByApp*/ true);
+        opaqueConsumer->setName(String8("Roger"));
+
+        // Set to VGA dimension for default, as that is guaranteed to be present
+        opaqueConsumer->setDefaultBufferSize(640, 480);
+        opaqueConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        sp<Surface> surface = opaqueConsumer->getSurface();
+
+        std::string noPhysicalId;
+        size_t rotations = sizeof(kRotations) / sizeof(int32_t) - 1;
+        sp<IGraphicBufferProducer> igbp = surface->getIGraphicBufferProducer();
+        OutputConfiguration output(
+                igbp, kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
+                noPhysicalId);
+#else
         sp<IGraphicBufferProducer> gbProducer;
         sp<IGraphicBufferConsumer> gbConsumer;
         BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
@@ -612,6 +849,7 @@
         OutputConfiguration output(gbProducer,
                 kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
                 noPhysicalId);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         int streamId;
         device->createStream(output, &streamId);
         CameraMetadata sessionParams;
@@ -648,14 +886,38 @@
     }
     setuid(AID_CAMERASERVER);
     std::shared_ptr<FuzzedDataProvider> fp = std::make_shared<FuzzedDataProvider>(data, size);
-    sp<CameraService> cs = new CameraService();
-    cs->clearCachedVariables();
-    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer(cs, fp);
+
+    std::call_once(gSmOnce, [&] {
+        /* Create a FakeServiceManager instance and add required services */
+        sp<FakeServiceManager> fsm = sp<FakeServiceManager>::make();
+        setDefaultServiceManager(fsm);
+        for (const char* service :
+             {"sensor_privacy", "permission", "media.camera.proxy", "batterystats", "media.metrics",
+              "media.extractor", "drm.drmManager", "permission_checker"}) {
+            addService(String16(service), fsm, fp.get());
+        }
+        const auto audioFlinger = sp<AudioFlinger>::make();
+        const auto afAdapter = sp<AudioFlingerServerAdapter>::make(audioFlinger);
+        CHECK_EQ(NO_ERROR,
+                 fsm->addService(String16(IAudioFlinger::DEFAULT_SERVICE_NAME),
+                                 IInterface::asBinder(afAdapter), false /* allowIsolated */,
+                                 IServiceManager::DUMP_FLAG_PRIORITY_DEFAULT));
+        sp<FuzzerActivityManager> am = new FuzzerActivityManager();
+        CHECK_EQ(NO_ERROR, fsm->addService(String16("activity"), IInterface::asBinder(am)));
+        sp<FuzzerSensorPrivacyManager> sensorPrivacyManager = new FuzzerSensorPrivacyManager();
+        CHECK_EQ(NO_ERROR, fsm->addService(String16("sensor_privacy"),
+                                           IInterface::asBinder(sensorPrivacyManager)));
+        sp<FuzzAppOpsService> appops = new FuzzAppOpsService();
+        CHECK_EQ(NO_ERROR, fsm->addService(String16("appops"), IInterface::asBinder(appops)));
+        MediaPlayerService::instantiate();
+        gCameraService = new CameraService();
+    });
+    sp<CameraFuzzer> camerafuzzer = new CameraFuzzer(gCameraService, fp);
     if (!camerafuzzer) {
         return 0;
     }
     camerafuzzer->process();
-    Camera2Fuzzer camera2fuzzer(cs, fp);
+    Camera2Fuzzer camera2fuzzer(gCameraService, fp);
     camera2fuzzer.process();
     return 0;
 }
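
The entry point above gates its expensive setup behind std::call_once, so the fake service manager and the services registered with it are created exactly once even though the fuzzer body runs for every input. A minimal sketch of that pattern, using only the C++ standard library (gSetupOnce and setUpProcessGlobals are illustrative names, not identifiers from this patch):

    #include <cstddef>
    #include <cstdint>
    #include <mutex>

    static std::once_flag gSetupOnce;

    static void setUpProcessGlobals() {
        // One-time, process-wide initialization (e.g. registering fake services)
        // goes here; whatever it creates must stay valid across later iterations.
    }

    extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
        std::call_once(gSetupOnce, setUpProcessGlobals);
        // Per-iteration fuzzing work consumes data/size here.
        (void)data;
        (void)size;
        return 0;
    }
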
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..4c56959
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..fc0e371
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10
new file mode 100644
index 0000000..1266b3e
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-10
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11
new file mode 100644
index 0000000..cb1c0e4
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-11
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12
new file mode 100644
index 0000000..ab820a4
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-12
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13
new file mode 100644
index 0000000..6051e9a
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-13
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14
new file mode 100644
index 0000000..596e55b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-14
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15
new file mode 100644
index 0000000..20d7dcb
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-15
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..5bbfa56
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..cd148f6
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..e4ddb50
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..3be3ce1
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..3b51e41
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..3b929df
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8
new file mode 100644
index 0000000..f92337b
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-8
Binary files differ
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9 b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9
new file mode 100644
index 0000000..0fe0f06
--- /dev/null
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/corpus/seed-2024-08-29-9
Binary files differ
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 55e2c9d..837bf6d 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -30,7 +30,7 @@
         "ExifUtilsTest.cpp",
         "NV12Compressor.cpp",
         "RotateAndCropMapperTest.cpp",
-	"SessionStatsBuilderTest.cpp",
+        "SessionStatsBuilderTest.cpp",
         "ZoomRatioTest.cpp",
     ],
 
@@ -44,13 +44,25 @@
         "libjpeg",
         "liblog",
         "libutils",
-        "camera_platform_flags_c_lib",
     ],
 
     static_libs: [
         "libgmock",
     ],
 
+    target: {
+        android: {
+            shared_libs: [
+                "camera_platform_flags_c_lib",
+            ],
+        },
+        host: {
+            shared_libs: [
+                "camera_platform_flags_c_lib_for_test",
+            ],
+        },
+    },
+
     cflags: [
         "-Wall",
         "-Wextra",
@@ -76,7 +88,7 @@
 
     defaults: [
         "libcameraservice_deps",
-	"cameraservice_test_hostsupported"
+        "cameraservice_test_hostsupported",
     ],
 
     // Only include libs that can't be run host-side here
@@ -84,6 +96,7 @@
         "libcutils",
         "libhidlbase",
         "libcamera_client",
+        "libgui",
         "libui",
         "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.camera.common@1.0",
@@ -108,6 +121,7 @@
 
     // Only include sources that can't be run host-side here
     srcs: [
+        "Camera3StreamSplitterTest.cpp",
         "CameraPermissionsTest.cpp",
         "CameraProviderManagerTest.cpp",
     ],
@@ -118,13 +132,13 @@
     name: "cameraservice_test_host",
 
     defaults: [
-        "cameraservice_test_hostsupported"
+        "cameraservice_test_hostsupported",
     ],
 
     include_dirs: [
         "frameworks/av/camera/include",
         "frameworks/av/camera/include/camera",
-        "frameworks/native/libs/binder/include_activitymanager"
+        "frameworks/native/libs/binder/include_activitymanager",
     ],
 
     // Only include libs that can't be run device-side here
diff --git a/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
new file mode 100644
index 0000000..5e32482
--- /dev/null
+++ b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
@@ -0,0 +1,186 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3StreamSplitterTest"
+// #define LOG_NDEBUG 0
+
+#include <android/hardware_buffer.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <com_android_internal_camera_flags.h>
+#include <gui/BufferItemConsumer.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/PixelFormat.h>
+
+#include <system/window.h>
+#include <vndk/window.h>
+
+#include <gtest/gtest.h>
+
+#include "../device3/Flags.h"
+
+#if USE_NEW_STREAM_SPLITTER
+#include "../device3/Camera3StreamSplitter.h"
+#else
+#include "../device3/deprecated/DeprecatedCamera3StreamSplitter.h"
+#endif  // USE_NEW_STREAM_SPLITTER
+
+using namespace android;
+
+namespace {
+
+uint64_t kConsumerUsage = AHARDWAREBUFFER_USAGE_CAMERA_READ;
+uint64_t kProducerUsage = AHARDWAREBUFFER_USAGE_CAMERA_READ;
+size_t kHalMaxBuffers = 3;
+uint32_t kWidth = 640;
+uint32_t kHeight = 480;
+PixelFormat kFormat = HAL_PIXEL_FORMAT_YCBCR_420_888;
+int64_t kDynamicRangeProfile = 0;
+
+std::tuple<sp<BufferItemConsumer>, sp<Surface>> createConsumerAndSurface() {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    sp<BufferItemConsumer> consumer = sp<BufferItemConsumer>::make(kConsumerUsage);
+    return {consumer, consumer->getSurface()};
+#else
+    sp<IGraphicBufferProducer> producer;
+    sp<IGraphicBufferConsumer> consumer;
+    BufferQueue::createBufferQueue(&producer, &consumer);
+
+    return {sp<BufferItemConsumer>::make(consumer, kConsumerUsage), sp<Surface>::make(producer)};
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+}
+
+class Camera3StreamSplitterTest : public testing::Test {
+  public:
+    void SetUp() override {
+#if USE_NEW_STREAM_SPLITTER
+        mSplitter = sp<Camera3StreamSplitter>::make();
+#else
+        mSplitter = sp<DeprecatedCamera3StreamSplitter>::make();
+#endif  // USE_NEW_STREAM_SPLITTER
+    }
+
+  protected:
+#if USE_NEW_STREAM_SPLITTER
+    sp<Camera3StreamSplitter> mSplitter;
+#else
+    sp<DeprecatedCamera3StreamSplitter> mSplitter;
+#endif  // USE_NEW_STREAM_SPLITTER
+};
+
+class TestSurfaceListener : public SurfaceListener {
+  public:
+    virtual void onBufferReleased() override { mNumBuffersReleased++; }
+    virtual bool needsReleaseNotify() { return true; }
+    virtual void onBufferDetached(int) override {}
+    virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>&) override {};
+
+    uint32_t mNumBuffersReleased = 0;
+};
+
+class TestConsumerListener : public BufferItemConsumer::FrameAvailableListener {
+  public:
+    TestConsumerListener(const wp<BufferItemConsumer>& consumer) : mConsumer(consumer) {}
+
+    virtual void onFrameAvailable(const BufferItem&) {
+        sp<BufferItemConsumer> consumer = mConsumer.promote();
+        EXPECT_NE(nullptr, consumer);
+
+        BufferItem item;
+        EXPECT_EQ(OK, consumer->acquireBuffer(&item, 0));
+        mNumBuffersAcquired++;
+        EXPECT_EQ(OK, consumer->releaseBuffer(item, Fence::NO_FENCE));
+    }
+    virtual void onFrameReplaced(const BufferItem&) {}
+    virtual void onFrameDequeued(const uint64_t) {}
+    virtual void onFrameCancelled(const uint64_t) {}
+    virtual void onFrameDetached(const uint64_t) {}
+
+    wp<BufferItemConsumer> mConsumer;
+    uint32_t mNumBuffersAcquired = 0;
+};
+
+}  // namespace
+
+TEST_F(Camera3StreamSplitterTest, WithoutSurfaces_NoBuffersConsumed) {
+    sp<Surface> consumer;
+    EXPECT_EQ(OK, mSplitter->connect({}, kConsumerUsage, kProducerUsage, kHalMaxBuffers, kWidth,
+                                     kHeight, kFormat, &consumer, kDynamicRangeProfile));
+
+    sp<TestSurfaceListener> surfaceListener = sp<TestSurfaceListener>::make();
+    EXPECT_EQ(OK, consumer->connect(NATIVE_WINDOW_API_CAMERA, surfaceListener, false));
+
+    sp<GraphicBuffer> buffer = new GraphicBuffer(kWidth, kHeight, kFormat, kProducerUsage);
+    EXPECT_EQ(OK, consumer->attachBuffer(buffer->getNativeBuffer()));
+    // TODO: Do this with the surface itself once the API is available.
+    EXPECT_EQ(OK,
+              ANativeWindow_queueBuffer(consumer.get(), buffer->getNativeBuffer(), /*fenceFd*/ -1));
+
+    EXPECT_EQ(0u, surfaceListener->mNumBuffersReleased);
+}
+
+TEST_F(Camera3StreamSplitterTest, TestProcessSingleBuffer) {
+    //
+    // Set up output consumers:
+    //
+    constexpr auto kSurfaceId1 = 1;
+    auto [bufferItemConsumer1, surface1] = createConsumerAndSurface();
+    sp<TestConsumerListener> consumerListener1 =
+            sp<TestConsumerListener>::make(bufferItemConsumer1);
+    bufferItemConsumer1->setFrameAvailableListener(consumerListener1);
+
+    constexpr auto kSurfaceId2 = 2;
+    auto [bufferItemConsumer2, surface2] = createConsumerAndSurface();
+    sp<TestConsumerListener> consumerListener2 =
+            sp<TestConsumerListener>::make(bufferItemConsumer2);
+    bufferItemConsumer2->setFrameAvailableListener(consumerListener2);
+
+    //
+    // Connect it to the splitter, get the input surface, and set it up:
+    //
+    sp<Surface> inputSurface;
+    EXPECT_EQ(OK, mSplitter->connect({{kSurfaceId1, surface1}, {kSurfaceId2, surface2}},
+                                     kConsumerUsage, kProducerUsage, kHalMaxBuffers, kWidth,
+                                     kHeight, kFormat, &inputSurface, kDynamicRangeProfile));
+    sp<TestSurfaceListener> surfaceListener = sp<TestSurfaceListener>::make();
+    EXPECT_EQ(OK, inputSurface->connect(NATIVE_WINDOW_API_CAMERA, surfaceListener, false));
+    // TODO: Do this with the surface itself once the API is available.
+    EXPECT_EQ(OK, inputSurface->getIGraphicBufferProducer()->allowAllocation(false));
+
+    //
+    // Create a buffer to use:
+    //
+    sp<GraphicBuffer> singleBuffer = new GraphicBuffer(kWidth, kHeight, kFormat, kProducerUsage);
+    EXPECT_NE(nullptr, singleBuffer);
+    mSplitter->attachBufferToOutputs(singleBuffer->getNativeBuffer(), {kSurfaceId1, kSurfaceId2});
+
+    //
+    // Verify that when we attach the buffer, it's processed appropriately:
+    //
+    EXPECT_EQ(OK, inputSurface->attachBuffer(singleBuffer->getNativeBuffer()));
+    EXPECT_EQ(OK, mSplitter->getOnFrameAvailableResult());
+    // TODO: Do this with the surface itself once the API is available.
+    EXPECT_EQ(OK, ANativeWindow_queueBuffer(inputSurface.get(), singleBuffer->getNativeBuffer(),
+                                            /*fenceFd*/ -1));
+
+    EXPECT_EQ(1u, consumerListener1->mNumBuffersAcquired);
+    EXPECT_EQ(1u, consumerListener2->mNumBuffersAcquired);
+    EXPECT_EQ(1u, surfaceListener->mNumBuffersReleased);
+}
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index 30b4691..50aeaca 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <android/hardware/BnCameraServiceProxy.h>
 #include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
@@ -146,6 +147,11 @@
         return mCameraServiceProxy->notifyCameraState(cameraSessionStats);
     }
 
+    virtual binder::Status notifyFeatureCombinationStats(
+            const hardware::CameraFeatureCombinationStats& featureCombStats) override {
+        return mCameraServiceProxy->notifyFeatureCombinationStats(featureCombStats);
+    }
+
     virtual binder::Status isCameraDisabled(int userId, bool *ret) override {
         if (mOverrideCameraDisabled) {
             *ret = mCameraDisabled;
@@ -218,6 +224,11 @@
 // Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
 // policy, and succeed when it isn't.
 TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+    clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -228,10 +239,10 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &device);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
         ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
@@ -243,10 +254,10 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &device);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(status.isOk());
     }
@@ -254,6 +265,10 @@
 
 // Test that consecutive camera connections succeed.
 TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -263,18 +278,18 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
@@ -284,6 +299,10 @@
 // Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
 // in the second call.
 TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -293,18 +312,18 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                1/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                hardware::ICameraService::ROTATION_OVERRIDE_NONE,
+                clientAttribution, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
diff --git a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
index 939126c..56cacef 100644
--- a/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraProviderManagerTest.cpp
@@ -445,10 +445,6 @@
 
     virtual std::shared_ptr<aidl::android::hardware::camera::provider::ICameraProvider>
             getService(const std::string& serviceName) override {
-        if (!flags::delay_lazy_hal_instantiation()) {
-            return mTestAidlCameraProvider;
-        }
-
         // If no provider has been given, fail; in reality, getService would
         // block for HALs that don't start correctly, so we should never use
         // getService when we don't have a valid HAL running
diff --git a/services/camera/libcameraservice/tests/DepthProcessorTest.cpp b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
index 673c149..75cf21d 100644
--- a/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
+++ b/services/camera/libcameraservice/tests/DepthProcessorTest.cpp
@@ -78,30 +78,30 @@
 }
 
 TEST(DepthProcessorTest, BadInput) {
+    static const size_t badInputBufferWidth = 17;
+    static const size_t badInputBufferHeight = 3;
+    static const size_t badInputJpegSize = 63;
+    static const size_t badInputBufferDepthSize = (badInputBufferWidth * badInputBufferHeight);
     int jpegQuality = 95;
 
     DepthPhotoInputFrame inputFrame;
+    std::vector<uint8_t> colorJpegBuffer(badInputJpegSize);
+    inputFrame.mMainJpegSize = colorJpegBuffer.size();
     // Worst case both depth and confidence maps have the same size as the main color image.
     inputFrame.mMaxJpegSize = inputFrame.mMainJpegSize * 3;
 
-    std::vector<uint8_t> colorJpegBuffer;
-    generateColorJpegBuffer(jpegQuality, ExifOrientation::ORIENTATION_UNDEFINED,
-            /*includeExif*/ false, /*switchDimensions*/ false, &colorJpegBuffer);
-
-    std::array<uint16_t, kTestBufferDepthSize> depth16Buffer;
-    generateDepth16Buffer(&depth16Buffer);
+    std::array<uint16_t, badInputBufferDepthSize> depth16Buffer;
 
     std::vector<uint8_t> depthPhotoBuffer(inputFrame.mMaxJpegSize);
     size_t actualDepthPhotoSize = 0;
 
-    inputFrame.mMainJpegWidth = kTestBufferWidth;
-    inputFrame.mMainJpegHeight = kTestBufferHeight;
+    inputFrame.mMainJpegWidth = badInputBufferWidth;
+    inputFrame.mMainJpegHeight = badInputBufferHeight;
     inputFrame.mJpegQuality = jpegQuality;
     ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
                 &actualDepthPhotoSize), 0);
 
     inputFrame.mMainJpegBuffer = reinterpret_cast<const char*> (colorJpegBuffer.data());
-    inputFrame.mMainJpegSize = colorJpegBuffer.size();
     ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
                 &actualDepthPhotoSize), 0);
 
@@ -113,6 +113,9 @@
 
     ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
                 nullptr), 0);
+
+    ASSERT_NE(processDepthPhotoFrame(inputFrame, depthPhotoBuffer.size(), depthPhotoBuffer.data(),
+                &actualDepthPhotoSize), 0);
 }
 
 TEST(DepthProcessorTest, BasicDepthPhotoValidation) {
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
index 93b440b..6c16317 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.cpp
@@ -28,6 +28,10 @@
 #include <hwbinder/IPCThreadState.h>
 #include <binderthreadstate/CallerUtils.h>
 
+namespace {
+    static const std::string kPermissionServiceName = "permission";
+} // namespace anonymous
+
 namespace android {
 
 namespace flags = com::android::internal::camera::flags;
@@ -80,6 +84,33 @@
     return;
 }
 
+// TODO(362551824): Make USE_CALLING_UID more explicit with a scoped enum.
+bool AttributionAndPermissionUtils::resolveClientUid(/*inout*/ int& clientUid) {
+    int callingUid = getCallingUid();
+
+    if (clientUid == hardware::ICameraService::USE_CALLING_UID) {
+        clientUid = callingUid;
+    } else if (!isTrustedCallingUid(callingUid)) {
+        return false;
+    }
+
+    return true;
+}
+
+// TODO(362551824): Make USE_CALLING_UID more explicit with a scoped enum.
+bool AttributionAndPermissionUtils::resolveClientPid(/*inout*/ int& clientPid) {
+    int callingUid = getCallingUid();
+    int callingPid = getCallingPid();
+
+    if (clientPid == hardware::ICameraService::USE_CALLING_PID) {
+        clientPid = callingPid;
+    } else if (!isTrustedCallingUid(callingUid)) {
+        return false;
+    }
+
+    return true;
+}
+
 bool AttributionAndPermissionUtils::checkAutomotivePrivilegedClient(const std::string &cameraId,
         const AttributionSourceState &attributionSource) {
     if (isAutomotivePrivilegedClient(attributionSource.uid)) {
@@ -111,16 +142,9 @@
         return true;
     }
 
-    if (!flags::cache_permission_services()) {
-        PermissionChecker permissionChecker;
-        return permissionChecker.checkPermissionForPreflight(
-                       toString16(permission), attributionSource, toString16(message),
-                       attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
-    } else {
-        return mPermissionChecker->checkPermissionForPreflight(
-                       toString16(permission), attributionSource, toString16(message),
-                       attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
-    }
+    return mPermissionChecker->checkPermissionForPreflight(
+                    toString16(permission), attributionSource, toString16(message),
+                    attributedOpCode) != PermissionChecker::PERMISSION_HARD_DENIED;
 }
 
 // Can camera service trust the caller based on the calling UID?
@@ -162,6 +186,33 @@
     return uid == AID_AUTOMOTIVE_EVS;
 }
 
+std::string AttributionAndPermissionUtils::getPackageNameFromUid(int clientUid) const {
+    std::string packageName("");
+
+    sp<IPermissionController> permCtrl = getPermissionController();
+    if (permCtrl == nullptr) {
+        // Return empty package name and the further interaction
+        // with camera will likely fail
+        return packageName;
+    }
+
+    Vector<String16> packages;
+
+    permCtrl->getPackagesForUid(clientUid, packages);
+
+    if (packages.isEmpty()) {
+        ALOGE("No packages for calling UID %d", clientUid);
+        // Return an empty package name; further interaction with the camera
+        // will likely fail
+        return packageName;
+    }
+
+    // Arbitrarily pick the first name in the list
+    packageName = toStdString(packages[0]);
+
+    return packageName;
+}
+
 status_t AttributionAndPermissionUtils::getUidForPackage(const std::string &packageName,
         int userId, /*inout*/uid_t& uid, int err) {
     PermissionController pc;
@@ -218,4 +269,23 @@
             attributionSource, std::string(), AppOpsManager::OP_NONE);
 }
 
+const sp<IPermissionController>& AttributionAndPermissionUtils::getPermissionController() const {
+    static const char* kPermissionControllerService = "permission";
+    static thread_local sp<IPermissionController> sPermissionController = nullptr;
+
+    if (sPermissionController == nullptr ||
+            !IInterface::asBinder(sPermissionController)->isBinderAlive()) {
+        sp<IServiceManager> sm = defaultServiceManager();
+        sp<IBinder> binder = sm->checkService(toString16(kPermissionControllerService));
+        if (binder == nullptr) {
+            ALOGE("%s: Could not get permission service", __FUNCTION__);
+            sPermissionController = nullptr;
+        } else {
+            sPermissionController = interface_cast<IPermissionController>(binder);
+        }
+    }
+
+    return sPermissionController;
+}
+
 } // namespace android
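
The resolveClientUid()/resolveClientPid() helpers added above follow a simple contract: when the caller passes the USE_CALLING_UID/USE_CALLING_PID sentinel, the value is replaced with the Binder caller's own uid/pid; an explicit value is accepted only from a trusted calling uid. A dependency-free sketch of that contract (the sentinel constant and the trust predicate below are stand-ins, not the real ICameraService definitions):

    #include <sys/types.h>

    namespace sketch {

    constexpr int kUseCallingUid = -1;  // stand-in for ICameraService::USE_CALLING_UID

    // Placeholder policy; the real check consults a list of trusted uids.
    inline bool isTrustedCallingUid(uid_t) { return false; }

    // Mirrors the inout behavior of resolveClientUid(): returns false when an
    // untrusted caller tries to act on behalf of another uid.
    inline bool resolveClientUid(int& clientUid, uid_t callingUid) {
        if (clientUid == kUseCallingUid) {
            clientUid = static_cast<int>(callingUid);
            return true;
        }
        return isTrustedCallingUid(callingUid);
    }

    }  // namespace sketch
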
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index 4f238ab..8bfe6d8 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -19,6 +19,7 @@
 #include <android/content/AttributionSourceState.h>
 #include <android/permission/PermissionChecker.h>
 #include <binder/BinderService.h>
+#include <binder/IPermissionController.h>
 #include <private/android_filesystem_config.h>
 
 namespace android {
@@ -41,12 +42,29 @@
         mCameraService = cameraService;
     }
 
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+        AttributionSourceState attributionSource{};
+        attributionSource.pid = callingPid;
+        attributionSource.uid = callingUid;
+        return attributionSource;
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+            int32_t deviceId) {
+        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
+        attributionSource.deviceId = deviceId;
+        return attributionSource;
+    }
+
     // Utilities handling Binder calling identities (previously in CameraThreadState)
     virtual int getCallingUid();
     virtual int getCallingPid();
     virtual int64_t clearCallingIdentity();
     virtual void restoreCallingIdentity(int64_t token);
 
+    virtual bool resolveClientUid(/*inout*/ int& clientUid);
+    virtual bool resolveClientPid(/*inout*/ int& clientPid);
+
     /**
      * Pre-grants the permission if the attribution source uid is for an automotive
      * privileged client. Otherwise uses system service permission checker to check
@@ -71,6 +89,15 @@
      */
     virtual bool isAutomotivePrivilegedClient(int32_t uid);
 
+    // In some cases the calling code has no access to the package it runs under,
+    // for example the NDK camera API.
+    // In this case we will get the packages for the calling UID and pick the first one
+    // for attributing the app op. This will work correctly for runtime permissions,
+    // as for legacy apps we will toggle the app op for all packages in the UID.
+    // The caveat is that the operation may be attributed to the wrong package and
+    // stats based on app ops may be slightly off.
+    virtual std::string getPackageNameFromUid(int clientUid) const;
+
     virtual status_t getUidForPackage(const std::string &packageName, int userId,
             /*inout*/uid_t& uid, int err);
     virtual bool isCallerCameraServerNotDelegating();
@@ -104,6 +131,8 @@
             const AttributionSourceState &attributionSource);
 
   private:
+    virtual const sp<IPermissionController>& getPermissionController() const;
+
     std::unique_ptr<permission::PermissionChecker> mPermissionChecker =
             std::make_unique<permission::PermissionChecker>();
 };
@@ -123,17 +152,13 @@
             : mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
 
     static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
-        AttributionSourceState attributionSource{};
-        attributionSource.pid = callingPid;
-        attributionSource.uid = callingUid;
-        return attributionSource;
+        return AttributionAndPermissionUtils::buildAttributionSource(callingPid, callingUid);
     }
 
     static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
             int32_t deviceId) {
-        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
-        attributionSource.deviceId = deviceId;
-        return attributionSource;
+        return AttributionAndPermissionUtils::buildAttributionSource(
+                callingPid, callingUid, deviceId);
     }
 
     static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
@@ -160,6 +185,14 @@
         mAttributionAndPermissionUtils->restoreCallingIdentity(token);
     }
 
+    bool resolveClientUid(/*inout*/ int& clientUid) const {
+        return mAttributionAndPermissionUtils->resolveClientUid(clientUid);
+    }
+
+    bool resolveClientPid(/*inout*/ int& clientPid) const {
+        return mAttributionAndPermissionUtils->resolveClientPid(clientPid);
+    }
+
     // The word 'System' here does not refer to callers only on the system
     // partition. They just need to have an android system uid.
     bool callerHasSystemUid() const {
@@ -237,6 +270,10 @@
         return mAttributionAndPermissionUtils->getUidForPackage(packageName, userId, uid, err);
     }
 
+    std::string getPackageNameFromUid(int clientUid) const {
+        return mAttributionAndPermissionUtils->getPackageNameFromUid(clientUid);
+    }
+
     bool isCallerCameraServerNotDelegating() const {
         return mAttributionAndPermissionUtils->isCallerCameraServerNotDelegating();
     }
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 4afae9b..85bca6f 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -18,19 +18,25 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <gui/Surface.h>
 #include <inttypes.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
 #include <camera/StringUtils.h>
 #include <binder/IServiceManager.h>
+#include <system/window.h>
+
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
 
 #include "CameraServiceProxyWrapper.h"
 
 namespace android {
 
 using hardware::CameraExtensionSessionStats;
+using hardware::CameraFeatureCombinationStats;
 using hardware::CameraSessionStats;
 using hardware::ICameraServiceProxy;
+using hardware::camera2::params::SessionConfiguration;
 
 namespace {
 // Sentinel value to be returned when extension session with a stale or invalid key is reported.
@@ -215,6 +221,111 @@
     proxyBinder->pingForUserUpdate();
 }
 
+int64_t CameraServiceProxyWrapper::encodeSessionConfiguration(
+        const SessionConfiguration& sessionConfig) {
+    int64_t features = CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+    const static int32_t WIDTH_4K = 3840;
+    const static int32_t HEIGHT_4K = 2160;
+
+    // Check session parameters
+    if (sessionConfig.hasSessionParameters()) {
+        const auto& parameters = sessionConfig.getSessionParameters();
+
+        camera_metadata_ro_entry fpsEntry = parameters.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
+        if (fpsEntry.count == 2 && fpsEntry.data.i32[1] == 60) {
+            features |= CameraFeatureCombinationStats::CAMERA_FEATURE_60_FPS;
+        }
+
+        camera_metadata_ro_entry stabEntry =
+                parameters.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE);
+        if (stabEntry.count == 1 && stabEntry.data.u8[0] ==
+                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) {
+            features |= CameraFeatureCombinationStats::CAMERA_FEATURE_STABILIZATION;
+        }
+    }
+
+    // Check output configurations
+    const auto& outputConfigs = sessionConfig.getOutputConfigurations();
+    for (const auto& config : outputConfigs) {
+        int format = config.getFormat();
+        int dataSpace = config.getDataspace();
+        int64_t dynamicRangeProfile = config.getDynamicRangeProfile();
+
+        // Check JPEG and JPEG_R features
+        if (format == HAL_PIXEL_FORMAT_BLOB) {
+            if (dataSpace == HAL_DATASPACE_V0_JFIF) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_JPEG;
+            } else if (dataSpace == static_cast<android_dataspace_t>(
+                    aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_JPEG_R;
+            }
+        } else {
+            if (dynamicRangeProfile == HAL_DATASPACE_BT2020_HLG) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_HLG10;
+            }
+
+            // Check 4K
+            const auto& gbps = config.getGraphicBufferProducers();
+            int32_t width = 0, height = 0;
+            if (gbps.size() > 0) {
+                if (gbps[0] == nullptr) {
+                    ALOGE("%s: Failed to query size due to abandoned surface.",
+                            __FUNCTION__);
+                    return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+                }
+
+                sp<Surface> surface = new Surface(gbps[0], /*useAsync*/false);
+                ANativeWindow *anw = surface.get();
+
+                width = ANativeWindow_getWidth(anw);
+                if (width < 0) {
+                    ALOGE("%s: Failed to query Surface width: %s (%d)",
+                            __FUNCTION__, strerror(-width), width);
+                    return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+                }
+                height = ANativeWindow_getHeight(anw);
+                if (height < 0) {
+                    ALOGE("%s: Failed to query Surface height: %s (%d)",
+                            __FUNCTION__, strerror(-height), height);
+                    return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+                }
+            } else {
+                width = config.getWidth();
+                height = config.getHeight();
+            }
+            if (width == WIDTH_4K && height == HEIGHT_4K) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_4K;
+            }
+        }
+    }
+    return features;
+}
+
+// Note: The `ret` parameter is the return value of the
+// `isSessionConfigurationWithParametersSupported` binder call from the app.
+void CameraServiceProxyWrapper::logFeatureCombinationInternal(
+        const std::string &cameraId, int clientUid,
+        const SessionConfiguration& sessionConfiguration, binder::Status ret,
+        int type) {
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    if (proxyBinder == nullptr) return;
+
+    int64_t featureCombination = encodeSessionConfiguration(sessionConfiguration);
+    int queryStatus = ret.isOk() ? OK : ret.serviceSpecificErrorCode();
+    CameraFeatureCombinationStats stats;
+    stats.mCameraId = cameraId;
+    stats.mUid = clientUid;
+    stats.mFeatureCombination = featureCombination;
+    stats.mQueryType = type;
+    stats.mStatus = queryStatus;
+
+    auto status = proxyBinder->notifyFeatureCombinationStats(stats);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed to notify feature combination stats: %s", __FUNCTION__,
+                status.exceptionMessage().c_str());
+    }
+}
+
 int CameraServiceProxyWrapper::getRotateAndCropOverride(const std::string &packageName,
         int lensFacing, int userId) {
     sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index b6a967f..ad8b1cd 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
 #define ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
 
+#include <android/hardware/CameraFeatureCombinationStats.h>
 #include <android/hardware/ICameraServiceProxy.h>
 
 #include <utils/Mutex.h>
@@ -26,7 +27,7 @@
 #include <string>
 
 #include <camera/CameraSessionStats.h>
-
+#include <camera/camera2/SessionConfiguration.h>
 namespace android {
 
 class CameraServiceProxyWrapper {
@@ -86,6 +87,11 @@
     // ID generated for the open event associated with them.
     static int64_t generateLogId(std::random_device& randomDevice);
 
+    static int64_t encodeSessionConfiguration(const SessionConfiguration& sessionConfig);
+
+    void logFeatureCombinationInternal(const std::string &cameraId, int clientUid,
+            const hardware::camera2::params::SessionConfiguration& sessionConfiguration,
+            binder::Status ret, int type);
 public:
     CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
             mCameraServiceProxy(serviceProxy)
@@ -115,6 +121,20 @@
             bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
             const std::vector<hardware::CameraStreamStats>& streamStats);
 
+    // Feature combination query
+    void logFeatureCombinationQuery(const std::string &id, int clientUid,
+            const hardware::camera2::params::SessionConfiguration& sessionConfiguration,
+            binder::Status ret) {
+        logFeatureCombinationInternal(id, clientUid, sessionConfiguration, ret,
+                (int)hardware::CameraFeatureCombinationStats::QueryType::QUERY_FEATURE_COMBINATION);
+    }
+    void logSessionCharacteristicsQuery(const std::string &id, int clientUid,
+            const hardware::camera2::params::SessionConfiguration& sessionConfiguration,
+            binder::Status ret) {
+        logFeatureCombinationInternal(id, clientUid, sessionConfiguration, ret, (int)
+                hardware::CameraFeatureCombinationStats::QueryType::QUERY_SESSION_CHARACTERISTICS);
+    }
+
     // Ping camera service proxy for user update
     void pingCameraServiceProxy();
 
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
index a7a2b5e..40ca276 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtils.cpp
@@ -21,6 +21,7 @@
 #include "../api2/HeicCompositeStream.h"
 #include "aidl/android/hardware/graphics/common/Dataspace.h"
 #include "api2/JpegRCompositeStream.h"
+#include "binder/Status.h"
 #include "common/CameraDeviceBase.h"
 #include "common/HalConversionsTemplated.h"
 #include "../CameraService.h"
@@ -679,6 +680,67 @@
     stream->useCase = static_cast<StreamUseCases>(streamInfo.streamUseCase);
 }
 
+binder::Status mapStream(const OutputStreamInfo& streamInfo, bool isCompositeJpegRDisabled,
+        const CameraMetadata& deviceInfo, camera_stream_rotation_t rotation,
+        size_t* streamIdx/*out*/, const std::string &physicalId, int32_t groupId,
+        const std::string& logicalCameraId,
+        aidl::android::hardware::camera::device::StreamConfiguration &streamConfiguration /*out*/,
+        bool *earlyExit /*out*/) {
+    bool isDepthCompositeStream =
+            camera3::DepthCompositeStream::isDepthCompositeStreamInfo(streamInfo);
+    bool isHeicCompositeStream =
+            camera3::HeicCompositeStream::isHeicCompositeStreamInfo(streamInfo);
+    bool isJpegRCompositeStream =
+            camera3::JpegRCompositeStream::isJpegRCompositeStreamInfo(streamInfo) &&
+            !isCompositeJpegRDisabled;
+    if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
+        // We need to take into account that composite streams can have
+        // additional internal camera streams.
+        std::vector<OutputStreamInfo> compositeStreams;
+        status_t ret;
+        if (isDepthCompositeStream) {
+          // TODO: Take care of composite streams.
+            ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+                    deviceInfo, &compositeStreams);
+        } else if (isHeicCompositeStream) {
+            ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        } else {
+            ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
+                deviceInfo, &compositeStreams);
+        }
+
+        if (ret != OK) {
+            std::string msg = fmt::sprintf(
+                    "Camera %s: Failed adding composite streams: %s (%d)",
+                    logicalCameraId.c_str(), strerror(-ret), ret);
+            ALOGE("%s: %s", __FUNCTION__, msg.c_str());
+            return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
+        }
+
+        if (compositeStreams.size() == 0) {
+            // No internal streams means composite stream not
+            // supported.
+            *earlyExit = true;
+            return binder::Status::ok();
+        } else if (compositeStreams.size() > 1) {
+            size_t streamCount = streamConfiguration.streams.size() + compositeStreams.size() - 1;
+            streamConfiguration.streams.resize(streamCount);
+        }
+
+        for (const auto& compositeStream : compositeStreams) {
+            mapStreamInfo(compositeStream, rotation,
+                    physicalId, groupId,
+                    &streamConfiguration.streams[(*streamIdx)++]);
+        }
+    } else {
+        mapStreamInfo(streamInfo, rotation,
+                physicalId, groupId, &streamConfiguration.streams[(*streamIdx)++]);
+    }
+
+    return binder::Status::ok();
+}
+
 binder::Status
 convertToHALStreamCombination(
         const SessionConfiguration& sessionConfiguration,
@@ -831,8 +893,13 @@
                                 "Deferred surface sensor pixel modes not valid");
             }
             streamInfo.streamUseCase = streamUseCase;
-            mapStreamInfo(streamInfo, camera3::CAMERA_STREAM_ROTATION_0, physicalCameraId, groupId,
-                    &streamConfiguration.streams[streamIdx++]);
+            auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                    camera3::CAMERA_STREAM_ROTATION_0, &streamIdx, physicalCameraId, groupId,
+                    logicalCameraId, streamConfiguration, earlyExit);
+            if (*earlyExit || !status.isOk()) {
+                return status;
+            }
+
             isStreamInfoValid = true;
 
             if (numBufferProducers == 0) {
@@ -851,57 +918,11 @@
                 return res;
 
             if (!isStreamInfoValid) {
-                bool isDepthCompositeStream =
-                        camera3::DepthCompositeStream::isDepthCompositeStream(surface);
-                bool isHeicCompositeStream =
-                        camera3::HeicCompositeStream::isHeicCompositeStream(surface);
-                bool isJpegRCompositeStream =
-                        camera3::JpegRCompositeStream::isJpegRCompositeStream(surface) &&
-                        !isCompositeJpegRDisabled;
-                if (isDepthCompositeStream || isHeicCompositeStream || isJpegRCompositeStream) {
-                    // We need to take in to account that composite streams can have
-                    // additional internal camera streams.
-                    std::vector<OutputStreamInfo> compositeStreams;
-                    if (isDepthCompositeStream) {
-                      // TODO: Take care of composite streams.
-                        ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
-                                deviceInfo, &compositeStreams);
-                    } else if (isHeicCompositeStream) {
-                        ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    } else {
-                        ret = camera3::JpegRCompositeStream::getCompositeStreamInfo(streamInfo,
-                            deviceInfo, &compositeStreams);
-                    }
-
-                    if (ret != OK) {
-                        std::string msg = fmt::sprintf(
-                                "Camera %s: Failed adding composite streams: %s (%d)",
-                                logicalCameraId.c_str(), strerror(-ret), ret);
-                        ALOGE("%s: %s", __FUNCTION__, msg.c_str());
-                        return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
-                    }
-
-                    if (compositeStreams.size() == 0) {
-                        // No internal streams means composite stream not
-                        // supported.
-                        *earlyExit = true;
-                        return binder::Status::ok();
-                    } else if (compositeStreams.size() > 1) {
-                        streamCount += compositeStreams.size() - 1;
-                        streamConfiguration.streams.resize(streamCount);
-                    }
-
-                    for (const auto& compositeStream : compositeStreams) {
-                        mapStreamInfo(compositeStream,
-                                static_cast<camera_stream_rotation_t> (it.getRotation()),
-                                physicalCameraId, groupId,
-                                &streamConfiguration.streams[streamIdx++]);
-                    }
-                } else {
-                    mapStreamInfo(streamInfo,
-                            static_cast<camera_stream_rotation_t> (it.getRotation()),
-                            physicalCameraId, groupId, &streamConfiguration.streams[streamIdx++]);
+                auto status = mapStream(streamInfo, isCompositeJpegRDisabled, deviceInfo,
+                        static_cast<camera_stream_rotation_t> (it.getRotation()), &streamIdx,
+                        physicalCameraId, groupId, logicalCameraId, streamConfiguration, earlyExit);
+                if (*earlyExit || !status.isOk()) {
+                    return status;
                 }
                 isStreamInfoValid = true;
             }
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index cfa1815..9986a84 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -23,6 +23,7 @@
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
+#include "utils/Utils.h"
 
 using android::camera3::OutputStreamInfo;
 using android::hardware::camera2::ICameraDeviceUser;
@@ -48,16 +49,16 @@
     hidl.streams.resize(aidl.streams.size());
     size_t i = 0;
     for (const auto &stream : aidl.streams) {
-        if (static_cast<int>(stream.dynamicRangeProfile) !=
+        if (eToI(stream.dynamicRangeProfile) !=
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
             ALOGE("%s Dynamic range profile %" PRId64 " not supported by HIDL", __FUNCTION__,
-                    stream.dynamicRangeProfile);
+                    eToI(stream.dynamicRangeProfile));
             return BAD_VALUE;
         }
 
-        if (static_cast<int>(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+        if (eToI(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
             ALOGE("%s Stream use case %" PRId64 "not supported by HIDL", __FUNCTION__,
-                    stream.useCase);
+                    eToI(stream.useCase));
             return BAD_VALUE;
         }
 
diff --git a/services/camera/libcameraservice/utils/TagMonitor.cpp b/services/camera/libcameraservice/utils/TagMonitor.cpp
index 38de93a..5258c0e 100644
--- a/services/camera/libcameraservice/utils/TagMonitor.cpp
+++ b/services/camera/libcameraservice/utils/TagMonitor.cpp
@@ -384,7 +384,7 @@
         }
         returnStr << "]\n";
     }
-    return std::move(returnStr.str());
+    return returnStr.str();
 }
 
 template<typename T>
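
The TagMonitor change above drops a redundant std::move: returnStr.str() already yields a temporary std::string, and wrapping the return expression in std::move prevents copy elision and triggers clang's -Wpessimizing-move. A small illustration of the before/after shapes (hypothetical functions, not the TagMonitor code):

    #include <sstream>
    #include <string>
    #include <utility>

    std::string dumpPessimizing(const std::ostringstream& os) {
        return std::move(os.str());  // warns: moving a temporary prevents copy elision
    }

    std::string dumpPreferred(const std::ostringstream& os) {
        return os.str();  // the temporary is elided directly into the return value
    }
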
diff --git a/services/camera/libcameraservice/utils/Utils.cpp b/services/camera/libcameraservice/utils/Utils.cpp
index c8f5e86..5f61de5 100644
--- a/services/camera/libcameraservice/utils/Utils.cpp
+++ b/services/camera/libcameraservice/utils/Utils.cpp
@@ -14,42 +14,59 @@
  * limitations under the License.
  */
 
+#define LOG_TAG "Camera3-Utils"
+
 #include "Utils.h"
 #include <android-base/properties.h>
 #include <com_android_internal_camera_flags.h>
-
+#include <utils/Errors.h>
+#include <utils/Log.h>
+#include <vendorsupport/api_level.h>
 
 namespace android {
 
-using namespace com::android::internal::camera::flags;
-
-constexpr const char *LEGACY_VNDK_VERSION_PROP = "ro.vndk.version";
-constexpr const char *BOARD_API_LEVEL_PROP = "ro.board.api_level";
-constexpr int MAX_VENDOR_API_LEVEL = 1000000;
-constexpr int FIRST_VNDK_VERSION = 202404;
-
 int getVNDKVersionFromProp(int defaultVersion) {
-    if (!com_android_internal_camera_flags_use_ro_board_api_level_for_vndk_version()) {
-        return base::GetIntProperty(LEGACY_VNDK_VERSION_PROP, defaultVersion);
-    }
-
-    int vndkVersion = base::GetIntProperty(BOARD_API_LEVEL_PROP, MAX_VENDOR_API_LEVEL);
-
-    if (vndkVersion == MAX_VENDOR_API_LEVEL) {
-        // Couldn't find property
+    int vendorApiLevel = AVendorSupport_getVendorApiLevel();
+    if (vendorApiLevel == 0) {
+        // Couldn't find vendor API level, return default
         return defaultVersion;
     }
 
-    if (vndkVersion < __ANDROID_API_V__) {
-        // VNDK versions below V return the corresponding SDK version.
-        return vndkVersion;
-    }
-
-    // VNDK for Android V and above are of the format YYYYMM starting with 202404 and is bumped
-    // up once a year. So V would be 202404 and the next one would be 202504.
-    // This is the same assumption as that made in system/core/init/property_service.cpp.
-    vndkVersion = (vndkVersion - FIRST_VNDK_VERSION) / 100;
-    return __ANDROID_API_V__ + vndkVersion;
+    // Vendor API level for Android V and above are of the format YYYYMM starting with 202404.
+    // AVendorSupport_getSdkApiLevelOf maps them back to SDK API levels while leaving older
+    // values unchanged.
+    return AVendorSupport_getSdkApiLevelOf(vendorApiLevel);
 }
 
-} // namespace android
+RunThreadWithRealtimePriority::RunThreadWithRealtimePriority(int tid)
+    : mTid(tid), mPreviousPolicy(sched_getscheduler(tid)) {
+    auto res = sched_getparam(mTid, &mPreviousParams);
+    if (res != OK) {
+        ALOGE("Can't retrieve thread scheduler parameters: %s (%d)", strerror(-res), res);
+        return;
+    }
+
+    struct sched_param param = {0};
+    param.sched_priority = kRequestThreadPriority;
+
+    res = sched_setscheduler(mTid, SCHED_FIFO, &param);
+    if (res != OK) {
+        ALOGW("Can't set realtime priority for thread: %s (%d)", strerror(-res), res);
+    } else {
+        ALOGD("Set real time priority for thread (tid %d)", mTid);
+        mPolicyBumped = true;
+    }
+}
+
+RunThreadWithRealtimePriority::~RunThreadWithRealtimePriority() {
+    if (mPolicyBumped) {
+        auto res = sched_setscheduler(mTid, mPreviousPolicy, &mPreviousParams);
+        if (res != OK) {
+            ALOGE("Can't set regular priority for thread: %s (%d)", strerror(-res), res);
+        } else {
+            ALOGD("Set regular priority for thread (tid %d)", mTid);
+        }
+    }
+}
+
+}  // namespace android
diff --git a/services/camera/libcameraservice/utils/Utils.h b/services/camera/libcameraservice/utils/Utils.h
index f8a107d..0eb5e2c 100644
--- a/services/camera/libcameraservice/utils/Utils.h
+++ b/services/camera/libcameraservice/utils/Utils.h
@@ -17,9 +17,23 @@
 #ifndef ANDROID_SERVERS_CAMERA_UTILS_H
 #define ANDROID_SERVERS_CAMERA_UTILS_H
 
+#include <sched.h>
+#include <unistd.h>
+#include <type_traits>
+
 namespace android {
 
 /**
+ * Magically convert an enum to its underlying integer type, mostly so its
+ * values can be printed with printf-style formatters without warnings.
+ * Backport of C++23 std::to_underlying()
+ */
+template<typename Enum>
+constexpr std::underlying_type_t<Enum> eToI(Enum val) {
+    return static_cast<std::underlying_type_t<Enum>>(val);
+}
+
+/**
  * As of Android V, ro.board.api_level returns the year and month of release (ex. 202404)
  * instead of release SDK version. This function maps year/month format back to release
  * SDK version.
@@ -28,6 +42,40 @@
  */
 int getVNDKVersionFromProp(int defaultVersion);
 
+/**
+ * An instance of this class will raise the scheduling policy of a given
+ * thread to real time and keep it this way throughout the lifetime
+ * of the object. The thread scheduling policy will revert to its original
+ * state after the instance is released. By default the implementation will
+ * raise the priority of the current thread unless clients explicitly specify
+ * another thread id.
+ * Client must avoid:
+ *  - Keeping an instance of this class for extended or long-running operations.
+ *    This is only intended for short/temporarily priority bumps that mitigate
+ *    scheduling delays within critical camera paths.
+ *  - Allocating instances of this class on the memory heap unless clients have
+ *    complete control over the object lifetime. It is preferable to allocate
+ *    instances of this class on the stack instead.
+ *  - Nesting multiple instances of this class using the same default or same thread id.
+ */
+class RunThreadWithRealtimePriority final {
+  public:
+    RunThreadWithRealtimePriority(int tid = gettid());
+    ~RunThreadWithRealtimePriority();
+
+    RunThreadWithRealtimePriority(const RunThreadWithRealtimePriority&) = delete;
+    RunThreadWithRealtimePriority& operator=(const RunThreadWithRealtimePriority&) = delete;
+
+    // SCHED_FIFO priority for request submission thread in HFR mode
+    static const int kRequestThreadPriority = 1;
+
+  private:
+    int mTid;
+    int mPreviousPolicy;
+    bool mPolicyBumped = false;
+    struct sched_param mPreviousParams;
+};
+
 } // namespace android
 
 #endif //ANDROID_SERVERS_CAMERA_UTILS_H
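
A minimal usage sketch for the two helpers added above (a hypothetical call site,
not part of this change; every name other than eToI and RunThreadWithRealtimePriority
is illustrative only):

```
#define LOG_TAG "RealtimePriorityExample"

#include <utils/Log.h>
#include "utils/Utils.h"

// Hypothetical enum standing in for any int-backed enum class logged below.
enum class StreamState : int { kIdle = 0, kStreaming = 1 };

void submitHighFrameRateRequest(StreamState state) {
    // eToI() lets the enum be printed with a printf-style formatter.
    ALOGV("Submitting request in state %d", android::eToI(state));

    // Stack-allocated, short-lived bump to SCHED_FIFO for the current thread;
    // the destructor restores the previous scheduling policy when the scope ends.
    android::RunThreadWithRealtimePriority priorityBump;

    // ... enqueue the capture request to the HAL here ...
}
```
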
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
index 1d0c5a2..22dd806 100644
--- a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.cpp
@@ -29,48 +29,46 @@
 void VirtualDeviceCameraIdMapper::addCamera(const std::string& cameraId,
         int32_t deviceId, const std::string& mappedCameraId) {
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
         return;
     }
 
     if (deviceId == kDefaultDeviceId) {
-        ALOGD("%s: Not adding entry for a camera of the default device", __func__);
+        ALOGV("%s: Not adding entry for a camera of the default device", __func__);
         return;
     }
 
-    ALOGD("%s: Adding camera %s for device %d with mapped id %s", __func__, cameraId.c_str(),
+    ALOGV("%s: Adding camera %s for device %d with mapped id %s", __func__, cameraId.c_str(),
           deviceId, mappedCameraId.c_str());
 
     std::scoped_lock lock(mLock);
     mDeviceIdMappedCameraIdPairToCameraIdMap[{deviceId, mappedCameraId}] = cameraId;
+    mCameraIdToDeviceIdMappedCameraIdPairMap[cameraId] = {deviceId, mappedCameraId};
 }
 
 void VirtualDeviceCameraIdMapper::removeCamera(const std::string& cameraId) {
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
         return;
     }
 
+    auto deviceIdAndMappedCameraIdPair = getDeviceIdAndMappedCameraIdPair(cameraId);
+
     std::scoped_lock lock(mLock);
-    for (auto it = mDeviceIdMappedCameraIdPairToCameraIdMap.begin();
-         it != mDeviceIdMappedCameraIdPairToCameraIdMap.end(); ++it) {
-        if (it->first.second == cameraId) {
-            mDeviceIdMappedCameraIdPairToCameraIdMap.erase(it);
-            return;
-        }
-    }
+    mCameraIdToDeviceIdMappedCameraIdPairMap.erase(cameraId);
+    mDeviceIdMappedCameraIdPairToCameraIdMap.erase(deviceIdAndMappedCameraIdPair);
 }
 
 std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
         int32_t deviceId, const std::string& mappedCameraId) const {
     if (deviceId == kDefaultDeviceId) {
-        ALOGD("%s: Returning the camera id as the mapped camera id for camera %s, as it "
+        ALOGV("%s: Returning the camera id as the mapped camera id for camera %s, as it "
               "belongs to the default device", __func__, mappedCameraId.c_str());
         return mappedCameraId;
     }
 
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled, returning the camera id as "
+        ALOGV("%s: Device-aware camera feature is not enabled, returning the camera id as "
               "the mapped camera id for camera %s", __func__, mappedCameraId.c_str());
         return mappedCameraId;
     }
@@ -79,7 +77,7 @@
     auto iterator = mDeviceIdMappedCameraIdPairToCameraIdMap.find(
             {deviceId, mappedCameraId});
     if (iterator == mDeviceIdMappedCameraIdPairToCameraIdMap.end()) {
-        ALOGW("%s: No entry found for device id %d and mapped camera id %s", __func__,
+        ALOGV("%s: No entry found for device id %d and mapped camera id %s", __func__,
               deviceId, mappedCameraId.c_str());
         return std::nullopt;
     }
@@ -89,19 +87,17 @@
 std::pair<int32_t, std::string> VirtualDeviceCameraIdMapper::getDeviceIdAndMappedCameraIdPair(
         const std::string& cameraId) const {
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
         return std::make_pair(kDefaultDeviceId, cameraId);
     }
 
     std::scoped_lock lock(mLock);
-    for (const auto& [deviceIdMappedCameraIdPair, actualCameraId]
-            : mDeviceIdMappedCameraIdPairToCameraIdMap) {
-        if (actualCameraId == cameraId) {
-            return deviceIdMappedCameraIdPair;
-        }
+    auto iterator = mCameraIdToDeviceIdMappedCameraIdPairMap.find(cameraId);
+    if (iterator != mCameraIdToDeviceIdMappedCameraIdPairMap.end()) {
+        return iterator->second;
     }
     ALOGV("%s: No device id and mapped camera id found for camera id %s, so it must belong "
-            "to the default device ? ", __func__, cameraId.c_str());
+            "to the default device ?", __func__, cameraId.c_str());
     return std::make_pair(kDefaultDeviceId, cameraId);
 }
 
@@ -124,7 +120,7 @@
 std::optional<std::string> VirtualDeviceCameraIdMapper::getActualCameraId(
         int api1CameraId, int32_t deviceId) const {
     if (!vd_flags::camera_device_awareness()) {
-        ALOGD("%s: Device-aware camera feature is not enabled", __func__);
+        ALOGV("%s: Device-aware camera feature is not enabled", __func__);
         return std::nullopt;
     }
 
@@ -139,7 +135,7 @@
             matchingCameraIndex++;
         }
     }
-    ALOGW("%s: No entry found for device id %d and API 1 camera id %d", __func__,
+    ALOGV("%s: No entry found for device id %d and API 1 camera id %d", __func__,
           deviceId, api1CameraId);
     return std::nullopt;
 }
diff --git a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
index 96c0cb4..fdfde23 100644
--- a/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
+++ b/services/camera/libcameraservice/utils/VirtualDeviceCameraIdMapper.h
@@ -79,6 +79,9 @@
     // Map of (deviceId, app-visible cameraId) -> HAL-visible cameraId
     std::map<std::pair<int32_t, std::string>, std::string>
             mDeviceIdMappedCameraIdPairToCameraIdMap GUARDED_BY(mLock);
+    // Map of HAL-visible cameraId -> (deviceId, app-visible cameraId)
+    std::map<std::string, std::pair<int32_t, std::string>>
+            mCameraIdToDeviceIdMappedCameraIdPairMap GUARDED_BY(mLock);
 };
 
 } // namespace android
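
The second map added above gives the mapper O(log n) lookups in both directions at
the cost of keeping two containers consistent. A minimal, self-contained sketch of
the pattern (simplified types, no locking, not the production class):

```
#include <cstdint>
#include <map>
#include <optional>
#include <string>
#include <utility>

// (deviceId, app-visible cameraId) on one side, HAL-visible cameraId on the other.
using DeviceCameraPair = std::pair<int32_t, std::string>;

struct CameraIdIndex {
    std::map<DeviceCameraPair, std::string> forward;  // pair -> HAL camera id
    std::map<std::string, DeviceCameraPair> reverse;  // HAL camera id -> pair

    void add(const DeviceCameraPair& key, const std::string& cameraId) {
        forward[key] = cameraId;
        reverse[cameraId] = key;
    }

    void remove(const std::string& cameraId) {
        auto it = reverse.find(cameraId);
        if (it == reverse.end()) return;
        forward.erase(it->second);  // erase the forward entry first,
        reverse.erase(it);          // then drop the reverse entry.
    }

    std::optional<std::string> lookup(const DeviceCameraPair& key) const {
        auto it = forward.find(key);
        if (it == forward.end()) return std::nullopt;
        return it->second;
    }
};
```
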
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index 90530f6..dd64daa 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -25,6 +25,7 @@
         "libEGL",
         "libGLESv2",
         "libGLESv3",
+        "android.companion.virtualdevice.flags-aconfig-cc",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
@@ -50,7 +51,6 @@
         "util/JpegUtil.cc",
         "util/MetadataUtil.cc",
         "util/Util.cc",
-        "util/TestPatternHelper.cc",
         "util/EglDisplayContext.cc",
         "util/EglFramebuffer.cc",
         "util/EglProgram.cc",
@@ -66,13 +66,7 @@
 cc_library_static {
     name: "libvirtualcamera",
     srcs: [
-        "VirtualCameraProvider.cc",
-        "VirtualCameraDevice.cc",
-        "VirtualCameraSession.cc",
-        "VirtualCameraStream.cc",
-        "VirtualCameraService.cc",
-        "VirtualCameraSessionContext.cc",
-        "VirtualCameraRenderThread.cc",
+        "*.cc",
     ],
     defaults: [
         "libvirtualcamera_defaults",
diff --git a/services/camera/virtualcamera/README.md b/services/camera/virtualcamera/README.md
new file mode 100644
index 0000000..04b4811
--- /dev/null
+++ b/services/camera/virtualcamera/README.md
@@ -0,0 +1,164 @@
+# Virtual Camera
+
+The virtual camera feature allows a 3rd party application to expose a remote or
+virtual camera to the standard Android camera frameworks (Camera2/CameraX, NDK,
+camera1).
+
+The stack is composed of 4 different parts:
+
+1.  The **Virtual Camera Service** (this directory), implementing the Camera HAL
+    and acting as an interface between the Android Camera Server and the *Virtual
+    Camera Owner* (via the VirtualDeviceManager APIs).
+
+2.  The **VirtualDeviceManager** running in the system process and handling the
+    communication between the Virtual Camera service and the Virtual Camera
+    owner.
+
+3.  The **Virtual Camera Owner**, the client application declaring the Virtual
+    Camera and handling the production of image data. We will also refer to this
+    part as the **producer**.
+
+4.  The **Consumer Application**, the client application consuming camera data,
+    which can be any application using the camera APIs.
+
+This document describes the functionalities of the *Virtual Camera Service*.
+
+## Before reading
+
+The service implements the Camera HAL. It's best to have a basic
+understanding of how it works by reading the
+[HAL documentation](https://source.android.com/docs/core/camera) first.
+
+![](https://source.android.com/static/docs/core/camera/images/ape_fwk_camera2.png)
+
+The HAL implementations are declared in:
+
+-   [VirtualCameraDevice](./VirtualCameraDevice.h)
+-   [VirtualCameraProvider](./VirtualCameraProvider.h)
+-   [VirtualCameraSession](./VirtualCameraSession.h)
+
+## Current supported features
+
+Virtual Cameras report `EXTERNAL`
+[hardware level](https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL)
+but some
+[functionalities of the `EXTERNAL`](https://developer.android.com/reference/android/hardware/camera2/CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
+hardware level are not fully supported.
+
+Here is a list of supported features:
+
+-   Single input multiple output stream and capture:
+    -   Support for YUV and JPEG
+
+Notable missing features:
+
+-   Support for auto 3A (AWB, AE, AF): the virtual camera will announce convergence
+    of the 3A algorithm even though it can't receive any information about this from
+    the owner.
+
+-   No flash/torch support
+
+## Overview
+
+Graphic data are exchanged using the Surface infrastructure. Like any other
+Camera HAL, the Surfaces to write data into are received from the client.
+Virtual Camera exposes a **different** Surface onto which the owner can write
+data. In the middle, we use an EGL Texture which adapts (if needed) the producer
+data to the required consumer format (scaling only for now, but we might also
+add support for rotation and cropping in the future).
+
+When the client application requires multiple resolutions, the closest one among
+the supported resolutions is used for the input data, and the image data is
+downscaled for the lower resolutions.
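+
+A minimal sketch of that selection step, assuming a simple closest-by-pixel-count
+policy (the names below are illustrative; the actual policy lives in the service
+code and may differ):
+
+```
+// Hypothetical helper: pick the supported input resolution closest (by pixel
+// count) to the requested one; smaller outputs are then downscaled from it.
+// Assumes the list of supported resolutions is non-empty.
+#include <algorithm>
+#include <cstdlib>
+#include <vector>
+
+struct Resolution { int width; int height; };
+
+Resolution pickClosestInput(const std::vector<Resolution>& supported,
+                            const Resolution& requested) {
+  const long requestedArea =
+      static_cast<long>(requested.width) * requested.height;
+  return *std::min_element(
+      supported.begin(), supported.end(),
+      [requestedArea](const Resolution& a, const Resolution& b) {
+        const long areaA = static_cast<long>(a.width) * a.height;
+        const long areaB = static_cast<long>(b.width) * b.height;
+        return std::labs(areaA - requestedArea) < std::labs(areaB - requestedArea);
+      });
+}
+```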
+
+Depending on the type of output, the rendering pipelines change. Here is an
+overview of the YUV and JPEG pipelines.
+
+**YUV Rendering:**
+
+```
+Virtual Device Owner Surface[1] (Producer) --{binds to}--> EGL
+Texture[1] --{renders into}--> Client Surface[1-n] (Consumer)
+```
+
+**JPEG Rendering:**
+
+```
+Virtual Device Owner Surface[1] (Producer) --{binds to}--> EGL
+Texture[1] --{compress data into}--> temporary buffer --{renders into}-->
+Client Surface[1-n] (Consumer)
+```
+
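+For the JPEG path, the compressed bytes are written at the start of the BLOB
+buffer and a small transport header is appended at the very end of the buffer.
+A simplified sketch of that last step (mirroring what the render thread does;
+the struct below stands in for the HAL's `CameraBlob`):
+
+```
+#include <cstddef>
+#include <cstdint>
+#include <cstring>
+
+// Stand-in for the HAL CameraBlob transport header.
+struct CameraBlob {
+  int32_t blobId;         // CameraBlobId::JPEG
+  int32_t blobSizeBytes;  // size of the compressed JPEG stream
+};
+
+void appendJpegBlobHeader(uint8_t* buffer, size_t bufferSize,
+                          int32_t compressedSizeBytes) {
+  // The camera framework expects the header at the very end of the gralloc
+  // buffer, i.e. at byte [bufferSize - sizeof(CameraBlob)].
+  const CameraBlob blob{.blobId = 0x00FF /* CameraBlobId::JPEG */,
+                        .blobSizeBytes = compressedSizeBytes};
+  std::memcpy(buffer + bufferSize - sizeof(CameraBlob), &blob, sizeof(CameraBlob));
+}
+```
+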
+## Life of a capture request
+
+> Before reading the following, you must understand the concepts of
+> [CaptureRequest](https://developer.android.com/reference/android/hardware/camera2/CaptureRequest)
+> and
+> [OutputConfiguration](https://developer.android.com/reference/android/hardware/camera2/OutputConfiguration).
+
+1.  The consumer creates a session with one or more `Surfaces`
+
+2.  The VirtualCamera owner will receive a call to
+    `VirtualCameraCallback#onStreamConfigured` with a reference to another
+    `Surface` that it can write into.
+
+3.  The consumer will then start sending `CaptureRequests`. The owner will
+    receive a call to `VirtualCameraCallback#onProcessCaptureRequest`, at which
+    point it should write the required data into the surface it previously
+    received. At the same time, a new task will be enqueued in the render thread.
+
+4.  The [VirtualCameraRenderThread](./VirtualCameraRenderThread.cc) will consume
+    the enqueued tasks as they come. It will wait for the producer to write into
+    the input Surface (using `Surface::waitForNextFrame`).
+
+    > **Note:** Since the Surface API allows us to wait for the next frame,
+    > there is no need for the producer to notify when the frame is ready by
+    > calling a `processCaptureResult()` equivalent.
+
+5.  The EGL Texture is updated with the content of the Surface.
+
+6.  The EGL Texture renders into the output Surfaces.
+
+7.  The Camera client is notified of the "shutter" event and the `CaptureResult`
+    is sent to the consumer.
+
+## EGL Rendering
+
+### The render thread
+
+The [VirtualCameraRenderThread](./VirtualCameraRenderThread.h) module takes care
+of rendering the input from the owner to the output via the EGL Texture. The
+rendering is done either to a JPEG buffer (the BLOB rendering used to create a
+JPEG) or to a YUV buffer, used mainly for preview Surfaces or video.
+Two EGLPrograms (shaders) defined in [EglProgram](./util/EglProgram.cc) handle
+the rendering of the data.
+
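+A condensed sketch of the per-request flow (the types below are stand-ins for
+`EglSurfaceTexture` and the stream buffers; error handling and the actual GL
+calls are omitted):
+
+```
+#include <chrono>
+#include <vector>
+
+struct InputTexture {  // stand-in for EglSurfaceTexture
+  bool waitForNextFrame(std::chrono::nanoseconds /*timeout*/) { return true; }
+  void updateTexture() {}  // latch the most recent producer frame
+};
+
+struct OutputBuffer {};  // stand-in for a consumer stream buffer
+
+void renderOneRequest(InputTexture& input, std::vector<OutputBuffer>& outputs,
+                      std::chrono::nanoseconds maxFrameDuration) {
+  if (!input.waitForNextFrame(maxFrameDuration)) {
+    // No new producer frame arrived in time: the last latched frame is reused.
+  }
+  input.updateTexture();
+  for (OutputBuffer& buffer : outputs) {
+    // Render into the buffer via the YUV or JPEG (BLOB) shader path.
+    (void)buffer;
+  }
+  // Notify the shutter event and send the CaptureResult to the framework.
+}
+```
+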
+### Initialization
+
+[EGlDisplayContext](./util/EglDisplayContext.h) initializes the whole EGL
+environment (Display, Surface, Context, and Config).
+
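+For orientation, a bare-bones EGL bring-up looks roughly like this (a generic
+sketch, not the actual `EglDisplayContext` code):
+
+```
+#include <EGL/egl.h>
+
+bool initEgl(EGLDisplay* outDisplay, EGLContext* outContext) {
+  EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+  if (display == EGL_NO_DISPLAY ||
+      eglInitialize(display, nullptr, nullptr) != EGL_TRUE) {
+    return false;
+  }
+  const EGLint configAttribs[] = {EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
+                                  EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+                                  EGL_NONE};
+  EGLConfig config;
+  EGLint numConfigs = 0;
+  if (eglChooseConfig(display, configAttribs, &config, 1, &numConfigs) !=
+          EGL_TRUE ||
+      numConfigs == 0) {
+    return false;
+  }
+  const EGLint contextAttribs[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
+  EGLContext context =
+      eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttribs);
+  if (context == EGL_NO_CONTEXT) {
+    return false;
+  }
+  *outDisplay = display;
+  *outContext = context;
+  return true;
+}
+```
+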
+The EGL Rendering is backed by an
+[ANativeWindow](https://developer.android.com/ndk/reference/group/a-native-window)
+which is just the native counterpart of the
+[Surface](https://developer.android.com/reference/android/view/Surface), which
+itself is the producer side of a buffer queue, the consumer being either the
+display (Camera preview) or some encoder (to save the data or send it across the
+network).
+
+### More about OpenGL
+
+To better understand how the EGL rendering works, the following resources can be
+used:
+
+Introduction to OpenGL: https://learnopengl.com/
+
+The official documentation of the EGL API can be queried at:
+https://www.khronos.org/registry/egl/sdk/docs/man/xhtml/
+
+And using Google search with the following query:
+
+```
+[function name] site:https://registry.khronos.org/EGL/sdk/docs/man/html/
+
+// example: eglSwapBuffers site:https://registry.khronos.org/EGL/sdk/docs/man/html/
+```
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureRequest.h b/services/camera/virtualcamera/VirtualCameraCaptureRequest.h
new file mode 100644
index 0000000..cf5402e
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureRequest.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
+
+#include "VirtualCameraDevice.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Struct used to pass request settings in the different part of
+// the virtual camera system.
+struct RequestSettings {
+  // JPEG_QUALITY metadata
+  int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+
+  // JPEG_ORIENTATION metadata
+  int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
+
+  // JPEG_THUMBNAIL_SIZE metadata
+  Resolution thumbnailResolution = Resolution(0, 0);
+
+  // JPEG_THUMBNAIL_QUALITY metadata
+  int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+
+  // ANDROID_CONTROL_AE_TARGET_FPS_RANGE metadata
+  std::optional<FpsRange> fpsRange;
+
+  // CONTROL_CAPTURE_INTENT metadata
+  camera_metadata_enum_android_control_capture_intent_t captureIntent =
+      VirtualCameraDevice::kDefaultCaptureIntent;
+
+  // JPEG_GPS_LOCATION metadata
+  std::optional<GpsCoordinates> gpsCoordinates;
+
+  // CONTROL_AE_PRECAPTURE_TRIGGER metadata
+  std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+      aePrecaptureTrigger;
+};
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
new file mode 100644
index 0000000..a61f553
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "VirtualCameraCaptureResult.h"
+
+#include <cstdint>
+
+#include "VirtualCameraCaptureRequest.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "util/MetadataUtil.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+using ::aidl::android::hardware::camera::device::CameraMetadata;
+namespace {
+// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+// This roughly corresponds to frame latency, we set to
+// documented minimum of 2.
+static constexpr uint8_t kPipelineDepth = 2;
+
+}  // namespace
+
+CameraMetadata createCaptureResultMetadata(
+    const std::chrono::nanoseconds timestamp,
+    const RequestSettings& requestSettings,
+    const Resolution reportedSensorSize) {
+  // All of the keys used in the response needs to be referenced in
+  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
+  // in VirtualCameraDevice.cc).
+  MetadataBuilder builder =
+      MetadataBuilder()
+          .setAberrationCorrectionMode(
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+          .setControlAeAvailableAntibandingModes(
+              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
+          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
+          .setControlAeExposureCompensation(0)
+          .setControlAeLockAvailable(false)
+          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
+          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+          .setControlAePrecaptureTrigger(
+              // Limited devices are expected to have precapture ae enabled and
+              // respond to cancellation requests. Since we don't actually support
+              // AE at all, let's just respect the cancellation expectation in
+              // case it's requested
+              requestSettings.aePrecaptureTrigger ==
+                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
+          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
+          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
+          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
+          .setControlCaptureIntent(requestSettings.captureIntent)
+          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
+          .setControlVideoStabilizationMode(
+              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
+          .setCropRegion(0, 0, reportedSensorSize.width,
+                         reportedSensorSize.height)
+          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setFlashMode(ANDROID_FLASH_MODE_OFF)
+          .setFocalLength(VirtualCameraDevice::kFocalLength)
+          .setJpegQuality(requestSettings.jpegQuality)
+          .setJpegOrientation(requestSettings.jpegOrientation)
+          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+                                requestSettings.thumbnailResolution.height)
+          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+          .setLensOpticalStabilizationMode(
+              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
+          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+          .setPipelineDepth(kPipelineDepth)
+          .setSensorTimestamp(timestamp)
+          .setStatisticsHotPixelMapMode(
+              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
+          .setStatisticsLensShadingMapMode(
+              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
+          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
+
+  if (requestSettings.fpsRange.has_value()) {
+    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
+  }
+
+  if (requestSettings.gpsCoordinates.has_value()) {
+    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
+    builder.setJpegGpsCoordinates(coordinates);
+  }
+
+  std::unique_ptr<CameraMetadata> metadata = builder.build();
+
+  if (metadata == nullptr) {
+    ALOGE("%s: Failed to build capture result metadata", __func__);
+    return CameraMetadata();
+  }
+  return std::move(*metadata);
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.h b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
new file mode 100644
index 0000000..9e5b4d7
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
+
+#include <chrono>
+#include <cstdint>
+#include <cstring>
+#include <future>
+#include <memory>
+#include <mutex>
+#include <thread>
+#include <utility>
+#include <vector>
+
+#include "Exif.h"
+#include "GLES/gl.h"
+#include "VirtualCameraCaptureRequest.h"
+#include "VirtualCameraDevice.h"
+#include "VirtualCameraRenderThread.h"
+#include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Construct the Metadata for the Capture result based on the request
+// settings, timestamp and reported sensor size.
+::aidl::android::hardware::camera::device::CameraMetadata
+createCaptureResultMetadata(std::chrono::nanoseconds timestamp,
+                            const RequestSettings& requestSettings,
+                            Resolution reportedSensorSize);
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.cc b/services/camera/virtualcamera/VirtualCameraDevice.cc
index fe9e0ed..c3be62b 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.cc
+++ b/services/camera/virtualcamera/VirtualCameraDevice.cc
@@ -68,18 +68,19 @@
 
 using namespace std::chrono_literals;
 
-// Prefix of camera name - "device@1.1/virtual/{numerical_id}"
+// Prefix of camera name - "device@1.1/virtual/{camera_id}"
 const char* kDevicePathPrefix = "device@1.1/virtual/";
 
 constexpr int32_t kMaxJpegSize = 3 * 1024 * 1024 /*3MiB*/;
 
-constexpr int32_t kMinFps = 15;
-
 constexpr std::chrono::nanoseconds kMaxFrameDuration =
-    std::chrono::duration_cast<std::chrono::nanoseconds>(1e9ns / kMinFps);
+    std::chrono::duration_cast<std::chrono::nanoseconds>(
+        1e9ns / VirtualCameraDevice::kMinFps);
 
 constexpr uint8_t kPipelineMaxDepth = 2;
 
+constexpr int k30Fps = 30;
+
 constexpr MetadataBuilder::ControlRegion kDefaultEmptyControlRegion{};
 
 const std::array<Resolution, 5> kStandardJpegThumbnailSizes{
@@ -130,16 +131,20 @@
   std::set<FpsRange> availableRanges;
 
   for (const SupportedStreamConfiguration& config : configs) {
-    availableRanges.insert({.minFps = kMinFps, .maxFps = config.maxFps});
+    availableRanges.insert(
+        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = config.maxFps});
     availableRanges.insert({.minFps = config.maxFps, .maxFps = config.maxFps});
   }
 
   if (std::any_of(configs.begin(), configs.end(),
                   [](const SupportedStreamConfiguration& config) {
-                    return config.maxFps >= 30;
+                    return config.maxFps >= k30Fps;
                   })) {
-    availableRanges.insert({.minFps = kMinFps, .maxFps = 30});
-    availableRanges.insert({.minFps = 30, .maxFps = 30});
+    // Extend the set of available ranges with (minFps <= 15, 30) & (30, 30) as
+    // required by CDD.
+    availableRanges.insert(
+        {.minFps = VirtualCameraDevice::kMinFps, .maxFps = k30Fps});
+    availableRanges.insert({.minFps = k30Fps, .maxFps = k30Fps});
   }
 
   return std::vector<FpsRange>(availableRanges.begin(), availableRanges.end());
@@ -399,8 +404,8 @@
 }  // namespace
 
 VirtualCameraDevice::VirtualCameraDevice(
-    const uint32_t cameraId, const VirtualCameraConfiguration& configuration,
-    int32_t deviceId)
+    const std::string& cameraId,
+    const VirtualCameraConfiguration& configuration, int32_t deviceId)
     : mCameraId(cameraId),
       mVirtualCameraClientCallback(configuration.virtualCameraCallback),
       mSupportedInputConfigurations(configuration.supportedStreamConfigs) {
@@ -577,11 +582,11 @@
 }
 
 binder_status_t VirtualCameraDevice::dump(int fd, const char**, uint32_t) {
-  ALOGD("Dumping virtual camera %d", mCameraId);
+  ALOGD("Dumping virtual camera %s", mCameraId.c_str());
   const char* indent = "  ";
   const char* doubleIndent = "    ";
-  dprintf(fd, "%svirtual_camera %d belongs to virtual device %d\n", indent,
-          mCameraId,
+  dprintf(fd, "%svirtual_camera %s belongs to virtual device %d\n", indent,
+          mCameraId.c_str(),
           getDeviceId(mCameraCharacteristics)
               .value_or(VirtualCameraService::kDefaultDeviceId));
   dprintf(fd, "%sSupportedStreamConfiguration:\n", indent);
@@ -592,7 +597,7 @@
 }
 
 std::string VirtualCameraDevice::getCameraName() const {
-  return std::string(kDevicePathPrefix) + std::to_string(mCameraId);
+  return std::string(kDevicePathPrefix) + mCameraId;
 }
 
 const std::vector<SupportedStreamConfiguration>&
@@ -613,6 +618,10 @@
   return maxResolution.value();
 }
 
+int VirtualCameraDevice::allocateInputStreamId() {
+  return mNextInputStreamId++;
+}
+
 std::shared_ptr<VirtualCameraDevice> VirtualCameraDevice::sharedFromThis() {
   // SharedRefBase which BnCameraDevice inherits from breaks
   // std::enable_shared_from_this. This is recommended replacement for
diff --git a/services/camera/virtualcamera/VirtualCameraDevice.h b/services/camera/virtualcamera/VirtualCameraDevice.h
index cba0674..a33d4cf 100644
--- a/services/camera/virtualcamera/VirtualCameraDevice.h
+++ b/services/camera/virtualcamera/VirtualCameraDevice.h
@@ -37,7 +37,7 @@
     : public ::aidl::android::hardware::camera::device::BnCameraDevice {
  public:
   explicit VirtualCameraDevice(
-      uint32_t cameraId,
+      const std::string& cameraId,
       const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
           configuration,
       int32_t deviceId);
@@ -92,10 +92,12 @@
   binder_status_t dump(int fd, const char** args, uint32_t numArgs) override;
 
   // Returns unique virtual camera name in form
-  // "device@{major}.{minor}/virtual/{numerical_id}"
+  // "device@{major}.{minor}/virtual/{camera_id}"
   std::string getCameraName() const;
 
-  uint32_t getCameraId() const { return mCameraId; }
+  const std::string& getCameraId() const {
+    return mCameraId;
+  }
 
   const std::vector<
       aidl::android::companion::virtualcamera::SupportedStreamConfiguration>&
@@ -104,6 +106,9 @@
   // Returns largest supported input resolution.
   Resolution getMaxInputResolution() const;
 
+  // Allocate and return next id for input stream (input surface).
+  int allocateInputStreamId();
+
   // Maximal number of RAW streams - virtual camera doesn't support RAW streams.
   static constexpr int32_t kMaxNumberOfRawStreams = 0;
 
@@ -126,6 +131,9 @@
   // Default JPEG orientation.
   static constexpr uint8_t kDefaultJpegOrientation = 0;
 
+  // Lowest min fps advertised in supported fps ranges.
+  static constexpr int kMinFps = 1;
+
   // Default Make and Model for Exif
   static constexpr char kDefaultMakeAndModel[] = "Android Virtual Camera";
 
@@ -135,7 +143,7 @@
  private:
   std::shared_ptr<VirtualCameraDevice> sharedFromThis();
 
-  const uint32_t mCameraId;
+  const std::string mCameraId;
   const std::shared_ptr<
       ::aidl::android::companion::virtualcamera::IVirtualCameraCallback>
       mVirtualCameraClientCallback;
@@ -145,6 +153,8 @@
   const std::vector<
       aidl::android::companion::virtualcamera::SupportedStreamConfiguration>
       mSupportedInputConfigurations;
+
+  std::atomic_int mNextInputStreamId;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.cc b/services/camera/virtualcamera/VirtualCameraProvider.cc
index 67eaec0..b2c10f6 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.cc
+++ b/services/camera/virtualcamera/VirtualCameraProvider.cc
@@ -150,11 +150,10 @@
 }
 
 std::shared_ptr<VirtualCameraDevice> VirtualCameraProvider::createCamera(
-    const VirtualCameraConfiguration& configuration, const int cameraId,
-    const int32_t deviceId) {
-  if (cameraId < 0) {
-    ALOGE("%s: Cannot create camera with negative id. cameraId: %d", __func__,
-          cameraId);
+    const VirtualCameraConfiguration& configuration,
+    const std::string& cameraId, const int32_t deviceId) {
+  if (cameraId.empty()) {
+    ALOGE("%s: Cannot create camera with empty cameraId", __func__);
     return nullptr;
   }
 
diff --git a/services/camera/virtualcamera/VirtualCameraProvider.h b/services/camera/virtualcamera/VirtualCameraProvider.h
index c536547..606b44c 100644
--- a/services/camera/virtualcamera/VirtualCameraProvider.h
+++ b/services/camera/virtualcamera/VirtualCameraProvider.h
@@ -77,7 +77,7 @@
   std::shared_ptr<VirtualCameraDevice> createCamera(
       const aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
           configuration,
-      int cameraId, int32_t deviceId);
+      const std::string& cameraId, int32_t deviceId);
 
   std::shared_ptr<VirtualCameraDevice> getCamera(const std::string& name);
 
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index ca62cce..bf4a45d 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -14,10 +14,12 @@
  * limitations under the License.
  */
 
-#include "hardware/gralloc.h"
+// #define LOG_NDEBUG 0
 #define LOG_TAG "VirtualCameraRenderThread"
 #include "VirtualCameraRenderThread.h"
 
+#include <android_companion_virtualdevice_flags.h>
+
 #include <chrono>
 #include <cstdint>
 #include <cstring>
@@ -25,10 +27,12 @@
 #include <memory>
 #include <mutex>
 #include <thread>
+#include <utility>
 #include <vector>
 
 #include "Exif.h"
 #include "GLES/gl.h"
+#include "VirtualCameraCaptureResult.h"
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/hardware/camera/common/Status.h"
@@ -50,8 +54,6 @@
 #include "ui/Rect.h"
 #include "util/EglFramebuffer.h"
 #include "util/JpegUtil.h"
-#include "util/MetadataUtil.h"
-#include "util/TestPatternHelper.h"
 #include "util/Util.h"
 #include "utils/Errors.h"
 
@@ -79,96 +81,24 @@
 
 namespace {
 
+// helper type for the visitor
+template <class... Ts>
+struct overloaded : Ts... {
+  using Ts::operator()...;
+};
+// explicit deduction guide (not needed as of C++20)
+template <class... Ts>
+overloaded(Ts...) -> overloaded<Ts...>;
+
 using namespace std::chrono_literals;
 
+namespace flags = ::android::companion::virtualdevice::flags;
+
 static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
 
-// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
-// This roughly corresponds to frame latency, we set to
-// documented minimum of 2.
-static constexpr uint8_t kPipelineDepth = 2;
-
 static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
 
-CameraMetadata createCaptureResultMetadata(
-    const std::chrono::nanoseconds timestamp,
-    const RequestSettings& requestSettings,
-    const Resolution reportedSensorSize) {
-  // All of the keys used in the response needs to be referenced in
-  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
-  // in VirtualCameraDevice.cc).
-  MetadataBuilder builder =
-      MetadataBuilder()
-          .setAberrationCorrectionMode(
-              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
-          .setControlAeAvailableAntibandingModes(
-              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
-          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
-          .setControlAeExposureCompensation(0)
-          .setControlAeLockAvailable(false)
-          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
-          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
-          .setControlAePrecaptureTrigger(
-              // Limited devices are expected to have precapture ae enabled and
-              // respond to cancellation request. Since we don't actuall support
-              // AE at all, let's just respect the cancellation expectation in
-              // case it's requested
-              requestSettings.aePrecaptureTrigger ==
-                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
-                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
-                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
-          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
-          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
-          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
-          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
-          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
-          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
-          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
-          .setControlCaptureIntent(requestSettings.captureIntent)
-          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
-          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
-          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
-          .setControlVideoStabilizationMode(
-              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
-          .setCropRegion(0, 0, reportedSensorSize.width,
-                         reportedSensorSize.height)
-          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
-          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
-          .setFlashMode(ANDROID_FLASH_MODE_OFF)
-          .setFocalLength(VirtualCameraDevice::kFocalLength)
-          .setJpegQuality(requestSettings.jpegQuality)
-          .setJpegOrientation(requestSettings.jpegOrientation)
-          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
-                                requestSettings.thumbnailResolution.height)
-          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
-          .setLensOpticalStabilizationMode(
-              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
-          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
-          .setPipelineDepth(kPipelineDepth)
-          .setSensorTimestamp(timestamp)
-          .setStatisticsHotPixelMapMode(
-              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
-          .setStatisticsLensShadingMapMode(
-              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
-          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
-
-  if (requestSettings.fpsRange.has_value()) {
-    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
-  }
-
-  if (requestSettings.gpsCoordinates.has_value()) {
-    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
-    builder.setJpegGpsCoordinates(coordinates);
-  }
-
-  std::unique_ptr<CameraMetadata> metadata = builder.build();
-
-  if (metadata == nullptr) {
-    ALOGE("%s: Failed to build capture result metadata", __func__);
-    return CameraMetadata();
-  }
-  return std::move(*metadata);
-}
+static constexpr UpdateTextureTask kUpdateTextureTask;
 
 NotifyMsg createShutterNotifyMsg(int frameNumber,
                                  std::chrono::nanoseconds timestamp) {
@@ -190,12 +120,12 @@
 
 NotifyMsg createRequestErrorNotifyMsg(int frameNumber) {
   NotifyMsg msg;
-  msg.set<NotifyMsg::Tag::error>(ErrorMsg{
-      .frameNumber = frameNumber,
-      // errorStreamId needs to be set to -1 for ERROR_REQUEST
-      // (not tied to specific stream).
-      .errorStreamId = -1,
-      .errorCode = ErrorCode::ERROR_REQUEST});
+  msg.set<NotifyMsg::Tag::error>(
+      ErrorMsg{.frameNumber = frameNumber,
+               // errorStreamId needs to be set to -1 for ERROR_REQUEST
+               // (not tied to specific stream).
+               .errorStreamId = -1,
+               .errorCode = ErrorCode::ERROR_REQUEST});
   return msg;
 }
 
@@ -278,6 +208,31 @@
   return app1Data;
 }
 
+std::chrono::nanoseconds getMaxFrameDuration(
+    const RequestSettings& requestSettings) {
+  if (requestSettings.fpsRange.has_value()) {
+    return std::chrono::nanoseconds(static_cast<uint64_t>(
+        1e9 / std::max(1, requestSettings.fpsRange->minFps)));
+  }
+  return std::chrono::nanoseconds(
+      static_cast<uint64_t>(1e9 / VirtualCameraDevice::kMinFps));
+}
+
+class FrameAvailableListenerProxy : public ConsumerBase::FrameAvailableListener {
+ public:
+  FrameAvailableListenerProxy(std::function<void()> callback)
+      : mOnFrameAvailableCallback(callback) {
+  }
+
+  virtual void onFrameAvailable(const BufferItem&) override {
+    ALOGV("%s: onFrameAvailable", __func__);
+    mOnFrameAvailableCallback();
+  }
+
+ private:
+  std::function<void()> mOnFrameAvailableCallback;
+};
+
 }  // namespace
 
 CaptureRequestBuffer::CaptureRequestBuffer(int streamId, int bufferId,
@@ -300,12 +255,12 @@
 VirtualCameraRenderThread::VirtualCameraRenderThread(
     VirtualCameraSessionContext& sessionContext,
     const Resolution inputSurfaceSize, const Resolution reportedSensorSize,
-    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback, bool testMode)
+    std::shared_ptr<ICameraDeviceCallback> cameraDeviceCallback)
     : mCameraDeviceCallback(cameraDeviceCallback),
       mInputSurfaceSize(inputSurfaceSize),
       mReportedSensorSize(reportedSensorSize),
-      mTestMode(testMode),
-      mSessionContext(sessionContext) {
+      mSessionContext(sessionContext),
+      mInputSurfaceFuture(mInputSurfacePromise.get_future()) {
 }
 
 VirtualCameraRenderThread::~VirtualCameraRenderThread() {
@@ -336,9 +291,25 @@
   return mRequestSettings;
 }
 
+void VirtualCameraRenderThread::requestTextureUpdate() {
+  std::lock_guard<std::mutex> lock(mLock);
+  // If the queue is not empty, we don't need to set the mTextureUpdateRequested
+  // flag, since the texture will be updated during ProcessCaptureRequestTask
+  // processing anyway.
+  if (mQueue.empty()) {
+    mTextureUpdateRequested = true;
+    mCondVar.notify_one();
+  }
+}
+
 void VirtualCameraRenderThread::enqueueTask(
     std::unique_ptr<ProcessCaptureRequestTask> task) {
   std::lock_guard<std::mutex> lock(mLock);
+  // When enqueuing a process capture request task, clear the
+  // mTextureUpdateRequested flag. If this flag is set, the texture was not yet
+  // updated and it will be updated when processing ProcessCaptureRequestTask
+  // anyway.
+  mTextureUpdateRequested = false;
   mQueue.emplace_back(std::move(task));
   mCondVar.notify_one();
 }
@@ -365,11 +336,10 @@
 }
 
 sp<Surface> VirtualCameraRenderThread::getInputSurface() {
-  return mInputSurfacePromise.get_future().get();
+  return mInputSurfaceFuture.get();
 }
 
-std::unique_ptr<ProcessCaptureRequestTask>
-VirtualCameraRenderThread::dequeueTask() {
+RenderThreadTask VirtualCameraRenderThread::dequeueTask() {
   std::unique_lock<std::mutex> lock(mLock);
   // Clang's thread safety analysis doesn't perform alias analysis,
   // so it doesn't support moveable std::unique_lock.
@@ -380,12 +350,20 @@
   ScopedLockAssertion lockAssertion(mLock);
 
   mCondVar.wait(lock, [this]() REQUIRES(mLock) {
-    return mPendingExit || !mQueue.empty();
+    return mPendingExit || mTextureUpdateRequested || !mQueue.empty();
   });
   if (mPendingExit) {
-    return nullptr;
+    // A RenderThreadTask holding a null task signals the render thread to terminate.
+    return RenderThreadTask(nullptr);
   }
-  std::unique_ptr<ProcessCaptureRequestTask> task = std::move(mQueue.front());
+  if (mTextureUpdateRequested) {
+    // If mTextureUpdateRequested is set, the queue is guaranteed to be empty;
+    // return kUpdateTextureTask to signal that the render thread should update
+    // the texture (consume a buffer from the queue).
+    mTextureUpdateRequested = false;
+    return RenderThreadTask(kUpdateTextureTask);
+  }
+  RenderThreadTask task(std::move(mQueue.front()));
   mQueue.pop_front();
   return task;
 }
@@ -400,15 +378,23 @@
       EglTextureProgram::TextureFormat::RGBA);
   mEglSurfaceTexture = std::make_unique<EglSurfaceTexture>(
       mInputSurfaceSize.width, mInputSurfaceSize.height);
+  sp<FrameAvailableListenerProxy> frameAvailableListener =
+      sp<FrameAvailableListenerProxy>::make(
+          [this]() { requestTextureUpdate(); });
+  mEglSurfaceTexture->setFrameAvailableListener(frameAvailableListener);
 
-  sp<Surface> inputSurface = mEglSurfaceTexture->getSurface();
-  if (mTestMode) {
-    inputSurface->connect(NATIVE_WINDOW_API_CPU, false, nullptr);
-  }
   mInputSurfacePromise.set_value(mEglSurfaceTexture->getSurface());
 
-  while (std::unique_ptr<ProcessCaptureRequestTask> task = dequeueTask()) {
-    processCaptureRequest(*task);
+  while (RenderThreadTask task = dequeueTask()) {
+    std::visit(
+        overloaded{[this](const std::unique_ptr<ProcessCaptureRequestTask>& t) {
+                     processTask(*t);
+                   },
+                   [this](const UpdateTextureTask&) {
+                     ALOGV("Idle update of the texture");
+                     mEglSurfaceTexture->updateTexture();
+                   }},
+        task);
   }
 
   // Destroy EGL utilities still on the render thread.
@@ -420,11 +406,56 @@
   ALOGV("Render thread exiting");
 }
 
-void VirtualCameraRenderThread::processCaptureRequest(
+void VirtualCameraRenderThread::processTask(
     const ProcessCaptureRequestTask& request) {
-  const std::chrono::nanoseconds timestamp =
+  std::chrono::nanoseconds timestamp =
       std::chrono::duration_cast<std::chrono::nanoseconds>(
           std::chrono::steady_clock::now().time_since_epoch());
+  const std::chrono::nanoseconds lastAcquisitionTimestamp(
+      mLastAcquisitionTimestampNanoseconds.exchange(timestamp.count(),
+                                                    std::memory_order_relaxed));
+
+  if (request.getRequestSettings().fpsRange) {
+    int maxFps = std::max(1, request.getRequestSettings().fpsRange->maxFps);
+    timestamp = throttleRendering(maxFps, lastAcquisitionTimestamp, timestamp);
+  }
+
+  // Calculate the maximal amount of time we can afford to wait for next frame.
+  const std::chrono::nanoseconds maxFrameDuration =
+      getMaxFrameDuration(request.getRequestSettings());
+  const std::chrono::nanoseconds elapsedDuration =
+      timestamp - lastAcquisitionTimestamp;
+  if (elapsedDuration < maxFrameDuration) {
+    // We can afford to wait for next frame.
+    // Note that if there's already a new frame in the input Surface, the call
+    // below returns immediately.
+    bool gotNewFrame = mEglSurfaceTexture->waitForNextFrame(maxFrameDuration -
+                                                            elapsedDuration);
+    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
+        std::chrono::steady_clock::now().time_since_epoch());
+    if (!gotNewFrame) {
+      ALOGV(
+          "%s: No new frame received on input surface after waiting for "
+          "%" PRIu64 "ns, repeating last frame.",
+          __func__,
+          static_cast<uint64_t>((timestamp - lastAcquisitionTimestamp).count()));
+    }
+    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
+                                               std::memory_order_relaxed);
+  }
+  // Acquire new (most recent) image from the Surface.
+  mEglSurfaceTexture->updateTexture();
+  std::chrono::nanoseconds captureTimestamp = timestamp;
+
+  if (flags::camera_timestamp_from_surface()) {
+    std::chrono::nanoseconds surfaceTimestamp =
+        getSurfaceTimestamp(elapsedDuration);
+    if (surfaceTimestamp.count() > 0) {
+      captureTimestamp = surfaceTimestamp;
+    }
+    ALOGV("%s captureTimestamp:%lld timestamp:%lld", __func__,
+          captureTimestamp.count(), timestamp.count());
+  }
 
   CaptureResult captureResult;
   captureResult.fmqResultSize = 0;
@@ -434,19 +465,11 @@
   captureResult.inputBuffer.streamId = -1;
   captureResult.physicalCameraMetadata.resize(0);
   captureResult.result = createCaptureResultMetadata(
-      timestamp, request.getRequestSettings(), mReportedSensorSize);
+      captureTimestamp, request.getRequestSettings(), mReportedSensorSize);
 
   const std::vector<CaptureRequestBuffer>& buffers = request.getBuffers();
   captureResult.outputBuffers.resize(buffers.size());
 
-  if (mTestMode) {
-    // In test mode let's just render something to the Surface ourselves.
-    renderTestPatternYCbCr420(mEglSurfaceTexture->getSurface(),
-                              request.getFrameNumber());
-  }
-
-  mEglSurfaceTexture->updateTexture();
-
   for (int i = 0; i < buffers.size(); ++i) {
     const CaptureRequestBuffer& reqBuffer = buffers[i];
     StreamBuffer& resBuffer = captureResult.outputBuffers[i];
@@ -476,7 +499,7 @@
   }
 
   std::vector<NotifyMsg> notifyMsg{
-      createShutterNotifyMsg(request.getFrameNumber(), timestamp)};
+      createShutterNotifyMsg(request.getFrameNumber(), captureTimestamp)};
   for (const StreamBuffer& resBuffer : captureResult.outputBuffers) {
     if (resBuffer.status != BufferStatus::OK) {
       notifyMsg.push_back(createBufferErrorNotifyMsg(request.getFrameNumber(),
@@ -505,6 +528,51 @@
   ALOGV("%s: Successfully called processCaptureResult", __func__);
 }
 
+std::chrono::nanoseconds VirtualCameraRenderThread::throttleRendering(
+    int maxFps, std::chrono::nanoseconds lastAcquisitionTimestamp,
+    std::chrono::nanoseconds timestamp) {
+  const std::chrono::nanoseconds minFrameDuration(
+      static_cast<uint64_t>(1e9 / maxFps));
+  const std::chrono::nanoseconds frameDuration =
+      timestamp - lastAcquisitionTimestamp;
+  if (frameDuration < minFrameDuration) {
+    // We're too fast for the configured maxFps, let's wait a bit.
+    const std::chrono::nanoseconds sleepTime = minFrameDuration - frameDuration;
+    ALOGV("Current frame duration would  be %" PRIu64
+          " ns corresponding to, "
+          "sleeping for %" PRIu64
+          " ns before updating texture to match maxFps %d",
+          static_cast<uint64_t>(frameDuration.count()),
+          static_cast<uint64_t>(sleepTime.count()), maxFps);
+
+    std::this_thread::sleep_for(sleepTime);
+    timestamp = std::chrono::duration_cast<std::chrono::nanoseconds>(
+        std::chrono::steady_clock::now().time_since_epoch());
+    mLastAcquisitionTimestampNanoseconds.store(timestamp.count(),
+                                               std::memory_order_relaxed);
+  }
+  return timestamp;
+}
+
+std::chrono::nanoseconds VirtualCameraRenderThread::getSurfaceTimestamp(
+    std::chrono::nanoseconds timeSinceLastFrame) {
+  std::chrono::nanoseconds surfaceTimestamp = mEglSurfaceTexture->getTimestamp();
+  if (surfaceTimestamp.count() < 0) {
+    uint64_t lastSurfaceTimestamp = mLastSurfaceTimestampNanoseconds.load();
+    if (lastSurfaceTimestamp > 0) {
+      // The timestamps were provided by the producer but we are
+      // repeating the last frame, so we increase the previous timestamp by
+      // the elapsed time since its capture, otherwise the camera framework
+      // will discard the frame.
+      surfaceTimestamp = std::chrono::nanoseconds(lastSurfaceTimestamp +
+                                                  timeSinceLastFrame.count());
+    }
+  }
+  mLastSurfaceTimestampNanoseconds.store(surfaceTimestamp.count(),
+                                         std::memory_order_relaxed);
+  return surfaceTimestamp;
+}
+
 void VirtualCameraRenderThread::flushCaptureRequest(
     const ProcessCaptureRequestTask& request) {
   CaptureResult captureResult;
@@ -565,7 +633,7 @@
     return {};
   }
 
-  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
+  // TODO(b/324383963) Add support for letterboxing if the thumbnail sizese
   // doesn't correspond
   //  to input texture aspect ratio.
   if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
@@ -636,9 +704,10 @@
     return status;
   }
 
-  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
+  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                              fence);
   if (planesLock.getStatus() != OK) {
+    ALOGE("Failed to lock hwBuffer planes");
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
@@ -646,23 +715,35 @@
       createExif(Resolution(stream->width, stream->height), resultMetadata,
                  createThumbnail(requestSettings.thumbnailResolution,
                                  requestSettings.thumbnailJpegQuality));
+
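+  // Compress directly into the gralloc buffer, leaving room at the end for
+  // the CameraBlob transport header that is appended below.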
+  unsigned long outBufferSize = stream->bufferSize - sizeof(CameraBlob);
+  void* outBuffer = (*planesLock).planes[0].data;
   std::optional<size_t> compressedSize = compressJpeg(
       stream->width, stream->height, requestSettings.jpegQuality,
-      framebuffer->getHardwareBuffer(), app1ExifData,
-      stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
+      framebuffer->getHardwareBuffer(), app1ExifData, outBufferSize, outBuffer);
 
   if (!compressedSize.has_value()) {
     ALOGE("%s: Failed to compress JPEG image", __func__);
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
+  // Add the transport header at the end of the JPEG output buffer.
+  //
+  // jpegBlobId must start at byte[buffer_size - sizeof(CameraBlob)],
+  // where buffer_size is the size of the gralloc buffer.
+  //
+  // See
+  // hardware/interfaces/camera/device/aidl/android/hardware/camera/device/CameraBlobId.aidl
+  // for the full explanation of the following code.
   CameraBlob cameraBlob{
       .blobId = CameraBlobId::JPEG,
       .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
 
-  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
-             (stream->bufferSize - sizeof(cameraBlob)),
-         &cameraBlob, sizeof(cameraBlob));
+  // Copy the cameraBlob to the end of the JPEG buffer.
+  uint8_t* jpegStreamEndAddress =
+      reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
+      (stream->bufferSize - sizeof(cameraBlob));
+  memcpy(jpegStreamEndAddress, &cameraBlob, sizeof(cameraBlob));
 
   ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
         __func__, compressedSize.value());
@@ -719,8 +800,8 @@
 
   Rect viewportRect =
       viewport.value_or(Rect(framebuffer.getWidth(), framebuffer.getHeight()));
-  glViewport(viewportRect.leftTop().x, viewportRect.leftTop().y,
-             viewportRect.getWidth(), viewportRect.getHeight());
+  glViewport(viewportRect.left, viewportRect.top, viewportRect.getWidth(),
+             viewportRect.getHeight());
 
   sp<GraphicBuffer> textureBuffer = mEglSurfaceTexture->getCurrentBuffer();
   if (textureBuffer == nullptr) {
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index e222d5b..a35eea1 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -17,13 +17,17 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERARENDERTHREAD_H
 
+#include <atomic>
+#include <chrono>
 #include <cstdint>
 #include <deque>
 #include <future>
 #include <memory>
 #include <thread>
+#include <variant>
 #include <vector>
 
+#include "VirtualCameraCaptureRequest.h"
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
@@ -33,7 +37,6 @@
 #include "util/EglFramebuffer.h"
 #include "util/EglProgram.h"
 #include "util/EglSurfaceTexture.h"
-#include "util/MetadataUtil.h"
 #include "util/Util.h"
 
 namespace android {
@@ -55,19 +58,6 @@
   const sp<Fence> mFence;
 };
 
-struct RequestSettings {
-  int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
-  int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
-  Resolution thumbnailResolution = Resolution(0, 0);
-  int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
-  std::optional<FpsRange> fpsRange;
-  camera_metadata_enum_android_control_capture_intent_t captureIntent =
-      VirtualCameraDevice::kDefaultCaptureIntent;
-  std::optional<GpsCoordinates> gpsCoordinates;
-  std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
-      aePrecaptureTrigger;
-};
-
 // Represents single capture request to fill set of buffers.
 class ProcessCaptureRequestTask {
  public:
@@ -93,6 +83,24 @@
   const RequestSettings mRequestSettings;
 };
 
+struct UpdateTextureTask {};
+
+struct RenderThreadTask
+    : public std::variant<std::unique_ptr<ProcessCaptureRequestTask>,
+                          UpdateTextureTask> {
+  // Allow implicit conversion to bool.
+  //
+  // Returns false if the RenderThreadTask consists of a null
+  // ProcessCaptureRequestTask, which signals that the thread should terminate.
+  operator bool() const {
+    const bool isExitSignal =
+        std::holds_alternative<std::unique_ptr<ProcessCaptureRequestTask>>(
+            *this) &&
+        std::get<std::unique_ptr<ProcessCaptureRequestTask>>(*this) == nullptr;
+    return !isExitSignal;
+  }
+};
+
 // Wraps dedicated rendering thread and rendering business with corresponding
 // input surface.
 class VirtualCameraRenderThread {
@@ -110,8 +118,7 @@
       Resolution reportedSensorSize,
       std::shared_ptr<
           ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
-          cameraDeviceCallback,
-      bool testMode = false);
+          cameraDeviceCallback);
 
   ~VirtualCameraRenderThread();
 
@@ -120,6 +127,12 @@
   // Stop rendering thread.
   void stop();
 
+  // Send request to render thread to update the texture.
+  // Currently queued buffers in the input surface will be consumed, the most
+  // recent buffer in the input surface will be attached to the texture, and
+  // all other buffers will be returned to the buffer queue.
+  void requestTextureUpdate() EXCLUDES(mLock);
+
   // Enqueue capture task for processing on render thread.
   void enqueueTask(std::unique_ptr<ProcessCaptureRequestTask> task)
       EXCLUDES(mLock);
@@ -131,13 +144,13 @@
   sp<Surface> getInputSurface();
 
  private:
-  std::unique_ptr<ProcessCaptureRequestTask> dequeueTask() EXCLUDES(mLock);
+  RenderThreadTask dequeueTask() EXCLUDES(mLock);
 
   // Rendering thread entry point.
   void threadLoop();
 
   // Process single capture request task (always called on render thread).
-  void processCaptureRequest(const ProcessCaptureRequestTask& captureRequestTask);
+  void processTask(const ProcessCaptureRequestTask& captureRequestTask);
 
   // Flush single capture request task returning the error status immediately.
   void flushCaptureRequest(const ProcessCaptureRequestTask& captureRequestTask);
@@ -176,6 +189,22 @@
       EglFrameBuffer& framebuffer, sp<Fence> fence = nullptr,
       std::optional<Rect> viewport = std::nullopt);
 
+  // Throttle the current thread to ensure that we are not rendering faster than
+  // the provided maxFps.
+  // maxFps: The maximum fps in the capture request
+  // lastAcquisitionTimestamp: timestamp of the previous frame
+  // timestamp: the current capture time
+  // Returns the time at which the capture has happened after throttling.
+  std::chrono::nanoseconds throttleRendering(
+      int maxFps, std::chrono::nanoseconds lastAcquisitionTimestamp,
+      std::chrono::nanoseconds timestamp);
+
+  // Fetch the timestamp of the latest buffer from the EGL Surface.
+  // timeSinceLastFrame: The elapsed time since the last captured frame.
+  // Returns 0 if no timestamp has been associated with this surface by the producer.
+  std::chrono::nanoseconds getSurfaceTimestamp(
+      std::chrono::nanoseconds timeSinceLastFrame);
+
   // Camera callback
   const std::shared_ptr<
       ::aidl::android::hardware::camera::device::ICameraDeviceCallback>
@@ -183,7 +212,6 @@
 
   const Resolution mInputSurfaceSize;
   const Resolution mReportedSensorSize;
-  const int mTestMode;
 
   VirtualCameraSessionContext& mSessionContext;
 
@@ -193,8 +221,13 @@
   std::mutex mLock;
   std::deque<std::unique_ptr<ProcessCaptureRequestTask>> mQueue GUARDED_BY(mLock);
   std::condition_variable mCondVar;
+  volatile bool mTextureUpdateRequested GUARDED_BY(mLock);
   volatile bool mPendingExit GUARDED_BY(mLock);
 
+  // Acquisition timestamp of last frame.
+  std::atomic<uint64_t> mLastAcquisitionTimestampNanoseconds;
+  std::atomic<uint64_t> mLastSurfaceTimestampNanoseconds;
+
   // EGL helpers - constructed and accessed only from rendering thread.
   std::unique_ptr<EglDisplayContext> mEglDisplayContext;
   std::unique_ptr<EglTextureProgram> mEglTextureYuvProgram;
@@ -202,6 +235,7 @@
   std::unique_ptr<EglSurfaceTexture> mEglSurfaceTexture;
 
   std::promise<sp<Surface>> mInputSurfacePromise;
+  std::shared_future<sp<Surface>> mInputSurfaceFuture;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraService.cc b/services/camera/virtualcamera/VirtualCameraService.cc
index b5b07f0..7466089 100644
--- a/services/camera/virtualcamera/VirtualCameraService.cc
+++ b/services/camera/virtualcamera/VirtualCameraService.cc
@@ -30,10 +30,12 @@
 
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraProvider.h"
+#include "VirtualCameraTestInstance.h"
 #include "aidl/android/companion/virtualcamera/Format.h"
 #include "aidl/android/companion/virtualcamera/LensFacing.h"
 #include "aidl/android/companion/virtualcamera/VirtualCameraConfiguration.h"
 #include "android/binder_auto_utils.h"
+#include "android/binder_interface_utils.h"
 #include "android/binder_libbinder.h"
 #include "android/binder_status.h"
 #include "binder/Status.h"
@@ -55,15 +57,13 @@
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
 using ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration;
 
-// TODO(b/301023410) Make camera id range configurable / dynamic
-// based on already registered devices.
-std::atomic_int VirtualCameraService::sNextId{1000};
-
 namespace {
 
+constexpr char kCameraIdPrefix[] = "v";
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kMaxFps = 60;
+constexpr int kTestCameraDefaultInputFps = 30;
 constexpr char kEnableTestCameraCmd[] = "enable_test_camera";
 constexpr char kDisableTestCameraCmd[] = "disable_test_camera";
 constexpr char kHelp[] = "help";
@@ -75,6 +75,9 @@
      Options:
        --camera_id=(ID) - override numerical ID for test camera instance
        --lens_facing=(front|back|external) - specifies lens facing for test camera instance
+       --input_fps=(fps) - specify input fps for test camera, valid values are from 1 to 1000
+       --sensor_orientation=(0|90|180|270) - Clockwise angle through which the output image 
+           needs to be rotated to be upright on the device screen in its native orientation
  * disable_test_camera
 )";
 constexpr char kCreateVirtualDevicePermission[] =
@@ -86,6 +89,9 @@
     "GL_EXT_YUV_target",
 };
 
+// Numerical portion for id to assign to next created camera.
+static std::atomic_int sNextIdNumericalPortion{1000};
+
 ndk::ScopedAStatus validateConfiguration(
     const VirtualCameraConfiguration& configuration) {
   if (configuration.supportedStreamConfigs.empty()) {
@@ -94,6 +100,13 @@
         Status::EX_ILLEGAL_ARGUMENT);
   }
 
+  if (configuration.virtualCameraCallback == nullptr) {
+    ALOGE("%s: Input configuration is missing virtual camera callback",
+          __func__);
+    return ndk::ScopedAStatus::fromServiceSpecificError(
+        Status::EX_ILLEGAL_ARGUMENT);
+  }
+
   for (const SupportedStreamConfiguration& config :
        configuration.supportedStreamConfigs) {
     if (!isFormatSupportedForInput(config.width, config.height,
@@ -181,7 +194,7 @@
   }
 
   return cmd;
-};
+}
 
 ndk::ScopedAStatus verifyRequiredEglExtensions() {
   EglDisplayContext context;
@@ -200,6 +213,11 @@
   return ndk::ScopedAStatus::ok();
 }
 
+std::string createCameraId(const int32_t deviceId) {
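+  // Combine the camera id prefix, the owning device id and the next numerical
+  // portion into an id of the form "v<deviceId>_<n>".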
+  return kCameraIdPrefix + std::to_string(deviceId) + "_" +
+         std::to_string(sNextIdNumericalPortion++);
+}
+
 }  // namespace
 
 VirtualCameraService::VirtualCameraService(
@@ -213,13 +231,14 @@
     const ::ndk::SpAIBinder& token,
     const VirtualCameraConfiguration& configuration, const int32_t deviceId,
     bool* _aidl_return) {
-  return registerCamera(token, configuration, sNextId++, deviceId, _aidl_return);
+  return registerCamera(token, configuration, createCameraId(deviceId),
+                        deviceId, _aidl_return);
 }
 
 ndk::ScopedAStatus VirtualCameraService::registerCamera(
     const ::ndk::SpAIBinder& token,
-    const VirtualCameraConfiguration& configuration, const int cameraId,
-    const int32_t deviceId, bool* _aidl_return) {
+    const VirtualCameraConfiguration& configuration,
+    const std::string& cameraId, const int32_t deviceId, bool* _aidl_return) {
   if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
     ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
           getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -297,7 +316,7 @@
 }
 
 ndk::ScopedAStatus VirtualCameraService::getCameraId(
-    const ::ndk::SpAIBinder& token, int32_t* _aidl_return) {
+    const ::ndk::SpAIBinder& token, std::string* _aidl_return) {
   if (!mPermissionProxy.checkCallingPermission(kCreateVirtualDevicePermission)) {
     ALOGE("%s: caller (pid %d, uid %d) doesn't hold %s permission", __func__,
           getpid(), getuid(), kCreateVirtualDevicePermission);
@@ -389,13 +408,12 @@
     return STATUS_OK;
   }
 
-  std::optional<int> cameraId;
+  std::optional<std::string> cameraId;
   auto it = options.find("camera_id");
   if (it != options.end()) {
-    cameraId = parseInt(it->second);
+    cameraId = it->second;
     if (!cameraId.has_value()) {
-      dprintf(err, "Invalid camera_id: %s\n, must be number > 0",
-              it->second.c_str());
+      dprintf(err, "Invalid camera_id: %s", it->second.c_str());
       return STATUS_BAD_VALUE;
     }
   }
@@ -411,6 +429,43 @@
     }
   }
 
+  std::optional<int> inputFps;
+  it = options.find("input_fps");
+  if (it != options.end()) {
+    inputFps = parseInt(it->second);
+    if (!inputFps.has_value() || inputFps.value() < 1 ||
+        inputFps.value() > 1000) {
+      dprintf(err, "Invalid input fps: %s, must be an integer in <1,1000> range.\n",
+              it->second.c_str());
+      return STATUS_BAD_VALUE;
+    }
+  }
+
+  std::optional<SensorOrientation> sensorOrientation;
+  std::optional<int> sensorOrientationInt;
+  it = options.find("sensor_orientation");
+  if (it != options.end()) {
+    sensorOrientationInt = parseInt(it->second);
+    switch (sensorOrientationInt.value_or(0)) {
+      case 0:
+        sensorOrientation = SensorOrientation::ORIENTATION_0;
+        break;
+      case 90:
+        sensorOrientation = SensorOrientation::ORIENTATION_90;
+        break;
+      case 180:
+        sensorOrientation = SensorOrientation::ORIENTATION_180;
+        break;
+      case 270:
+        sensorOrientation = SensorOrientation::ORIENTATION_270;
+        break;
+      default:
+        dprintf(err, "Invalid sensor rotation: %s, must be 0, 90, 180 or 270.\n",
+                it->second.c_str());
+        return STATUS_BAD_VALUE;
+    }
+  }
+
   sp<BBinder> token = sp<BBinder>::make();
   mTestCameraToken.set(AIBinder_fromPlatformBinder(token));
 
@@ -418,10 +473,16 @@
   VirtualCameraConfiguration configuration;
   configuration.supportedStreamConfigs.push_back({.width = kVgaWidth,
                                                   .height = kVgaHeight,
-                                                  Format::YUV_420_888,
+                                                  Format::RGBA_8888,
                                                   .maxFps = kMaxFps});
   configuration.lensFacing = lensFacing.value_or(LensFacing::EXTERNAL);
-  registerCamera(mTestCameraToken, configuration, cameraId.value_or(sNextId++),
+  configuration.sensorOrientation =
+      sensorOrientation.value_or(SensorOrientation::ORIENTATION_0);
+  configuration.virtualCameraCallback =
+      ndk::SharedRefBase::make<VirtualCameraTestInstance>(
+          inputFps.value_or(kTestCameraDefaultInputFps));
+  registerCamera(mTestCameraToken, configuration,
+                 cameraId.value_or(std::to_string(sNextIdNumericalPortion++)),
                  kDefaultDeviceId, &ret);
   if (ret) {
     dprintf(out, "Successfully registered test camera %s\n",
diff --git a/services/camera/virtualcamera/VirtualCameraService.h b/services/camera/virtualcamera/VirtualCameraService.h
index f04acb5..4ef01c7 100644
--- a/services/camera/virtualcamera/VirtualCameraService.h
+++ b/services/camera/virtualcamera/VirtualCameraService.h
@@ -50,15 +50,17 @@
       const ::ndk::SpAIBinder& token,
       const ::aidl::android::companion::virtualcamera::VirtualCameraConfiguration&
           configuration,
-      int cameraId, int32_t deviceId, bool* _aidl_return) EXCLUDES(mLock);
+      const std::string& cameraId, int32_t deviceId, bool* _aidl_return)
+      EXCLUDES(mLock);
 
   // Unregisters camera corresponding to the binder token.
   ndk::ScopedAStatus unregisterCamera(const ::ndk::SpAIBinder& token) override
       EXCLUDES(mLock);
 
   // Returns the camera id corresponding to the binder token.
-  ndk::ScopedAStatus getCameraId(
-      const ::ndk::SpAIBinder& token, int32_t* _aidl_return) override EXCLUDES(mLock);
+  ndk::ScopedAStatus getCameraId(const ::ndk::SpAIBinder& token,
+                                 std::string* _aidl_return) override
+      EXCLUDES(mLock);
 
   // Returns VirtualCameraDevice corresponding to binder token or nullptr if
   // there's no camera associated with the token.
@@ -101,9 +103,6 @@
 
   // Local binder token for test camera instance, or nullptr if there's none.
   ::ndk::SpAIBinder mTestCameraToken;
-
-  // Numerical id to assign to next created camera.
-  static std::atomic_int sNextId;
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index 28fa495..88929cc 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -66,14 +66,12 @@
 #include "util/EglProgram.h"
 #include "util/JpegUtil.h"
 #include "util/MetadataUtil.h"
-#include "util/TestPatternHelper.h"
 #include "util/Util.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
 
-using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
 using ::aidl::android::hardware::camera::common::Status;
@@ -88,7 +86,6 @@
 using ::aidl::android::hardware::camera::device::Stream;
 using ::aidl::android::hardware::camera::device::StreamBuffer;
 using ::aidl::android::hardware::camera::device::StreamConfiguration;
-using ::aidl::android::hardware::camera::device::StreamRotation;
 using ::aidl::android::hardware::common::fmq::MQDescriptor;
 using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
 using ::aidl::android::hardware::graphics::common::BufferUsage;
@@ -213,6 +210,27 @@
   return Resolution(inputConfig.width, inputConfig.height);
 }
 
+std::optional<Resolution> resolutionFromSurface(const sp<Surface> surface) {
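+  // Query the native window for its current width and height; return nullopt
+  // if the surface is null or either query fails.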
+  Resolution res{0, 0};
+  if (surface == nullptr) {
+    ALOGE("%s: Cannot get resolution from null surface", __func__);
+    return std::nullopt;
+  }
+
+  int status = surface->query(NATIVE_WINDOW_WIDTH, &res.width);
+  if (status != NO_ERROR) {
+    ALOGE("%s: Failed to get width from surface", __func__);
+    return std::nullopt;
+  }
+
+  status = surface->query(NATIVE_WINDOW_HEIGHT, &res.height);
+  if (status != NO_ERROR) {
+    ALOGE("%s: Failed to get height from surface", __func__);
+    return std::nullopt;
+  }
+  return res;
+}
+
 std::optional<SupportedStreamConfiguration> pickInputConfigurationForStreams(
     const std::vector<Stream>& requestedStreams,
     const std::vector<SupportedStreamConfiguration>& supportedInputConfigs) {
@@ -293,13 +311,13 @@
 
 ndk::ScopedAStatus VirtualCameraSession::close() {
   ALOGV("%s", __func__);
-
-  if (mVirtualCameraClientCallback != nullptr) {
-    mVirtualCameraClientCallback->onStreamClosed(/*streamId=*/0);
-  }
-
   {
     std::lock_guard<std::mutex> lock(mLock);
+
+    if (mVirtualCameraClientCallback != nullptr) {
+      mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
+    }
+
     if (mRenderThread != nullptr) {
       mRenderThread->stop();
       mRenderThread = nullptr;
@@ -340,6 +358,7 @@
   }
 
   sp<Surface> inputSurface = nullptr;
+  int inputStreamId = -1;
   std::optional<SupportedStreamConfiguration> inputConfig;
   {
     std::lock_guard<std::mutex> lock(mLock);
@@ -359,16 +378,49 @@
           __func__, in_requestedConfiguration.toString().c_str());
       return cameraStatus(Status::ILLEGAL_ARGUMENT);
     }
-    if (mRenderThread == nullptr) {
-      // If there's no client callback, start camera in test mode.
-      const bool testMode = mVirtualCameraClientCallback == nullptr;
-      mRenderThread = std::make_unique<VirtualCameraRenderThread>(
-          mSessionContext, resolutionFromInputConfig(*inputConfig),
-          virtualCamera->getMaxInputResolution(), mCameraDeviceCallback,
-          testMode);
-      mRenderThread->start();
-      inputSurface = mRenderThread->getInputSurface();
+
+    if (mRenderThread != nullptr) {
+      // If there's already a render thread, this is not the first
+      // configuration call. If the surface has the same resolution and pixel
+      // format as the picked config, we don't need to do anything; the
+      // current render thread can serve the new set of configurations.
+      // However, if it differs, we need to discard the current surface and
+      // reinitialize the render thread.
+
+      std::optional<Resolution> currentInputResolution =
+          resolutionFromSurface(mRenderThread->getInputSurface());
+      if (currentInputResolution.has_value() &&
+          *currentInputResolution == resolutionFromInputConfig(*inputConfig)) {
+        ALOGI(
+            "%s: Newly configured set of streams matches existing client "
+            "surface (%dx%d)",
+            __func__, currentInputResolution->width,
+            currentInputResolution->height);
+        return ndk::ScopedAStatus::ok();
+      }
+
+      if (mVirtualCameraClientCallback != nullptr) {
+        mVirtualCameraClientCallback->onStreamClosed(mCurrentInputStreamId);
+      }
+
+      ALOGV(
+          "%s: Newly requested output streams are not suitable for "
+          "pre-existing surface (%dx%d), creating new surface (%dx%d)",
+          __func__, currentInputResolution->width,
+          currentInputResolution->height, inputConfig->width,
+          inputConfig->height);
+
+      mRenderThread->flush();
+      mRenderThread->stop();
     }
+
+    mRenderThread = std::make_unique<VirtualCameraRenderThread>(
+        mSessionContext, resolutionFromInputConfig(*inputConfig),
+        virtualCamera->getMaxInputResolution(), mCameraDeviceCallback);
+    mRenderThread->start();
+    inputSurface = mRenderThread->getInputSurface();
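+    // Allocate a new input stream id for this configuration; it is reported
+    // to the client callback below and used when closing the stream later.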
+    inputStreamId = mCurrentInputStreamId =
+        virtualCamera->allocateInputStreamId();
   }
 
   if (mVirtualCameraClientCallback != nullptr && inputSurface != nullptr) {
@@ -376,7 +428,7 @@
     // support for multiple input streams is implemented. For now we always
     // create single texture.
     mVirtualCameraClientCallback->onStreamConfigured(
-        /*streamId=*/0, aidl::android::view::Surface(inputSurface.get()),
+        inputStreamId, aidl::android::view::Surface(inputSurface.get()),
         inputConfig->width, inputConfig->height, inputConfig->pixelFormat);
   }
 
@@ -523,6 +575,7 @@
 
   std::shared_ptr<ICameraDeviceCallback> cameraCallback = nullptr;
   RequestSettings requestSettings;
+  int currentInputStreamId;
   {
     std::lock_guard<std::mutex> lock(mLock);
 
@@ -541,6 +594,7 @@
     requestSettings = createSettingsFromMetadata(mCurrentRequestMetadata);
 
     cameraCallback = mCameraDeviceCallback;
+    currentInputStreamId = mCurrentInputStreamId;
   }
 
   if (cameraCallback == nullptr) {
@@ -578,7 +632,7 @@
 
   if (mVirtualCameraClientCallback != nullptr) {
     auto status = mVirtualCameraClientCallback->onProcessCaptureRequest(
-        /*streamId=*/0, request.frameNumber);
+        currentInputStreamId, request.frameNumber);
     if (!status.isOk()) {
       ALOGE(
           "Failed to invoke onProcessCaptureRequest client callback for frame "
diff --git a/services/camera/virtualcamera/VirtualCameraSession.h b/services/camera/virtualcamera/VirtualCameraSession.h
index 556314f..c2044b9 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.h
+++ b/services/camera/virtualcamera/VirtualCameraSession.h
@@ -143,6 +143,8 @@
       GUARDED_BY(mLock);
 
   std::unique_ptr<VirtualCameraRenderThread> mRenderThread GUARDED_BY(mLock);
+
+  int mCurrentInputStreamId GUARDED_BY(mLock);
 };
 
 }  // namespace virtualcamera
diff --git a/services/camera/virtualcamera/VirtualCameraSessionContext.cc b/services/camera/virtualcamera/VirtualCameraSessionContext.cc
index 284ad05..aab2d0d 100644
--- a/services/camera/virtualcamera/VirtualCameraSessionContext.cc
+++ b/services/camera/virtualcamera/VirtualCameraSessionContext.cc
@@ -129,7 +129,8 @@
           streamId);
     return std::optional<Stream>();
   }
-  return {it->second->getStreamConfig()};
+  VirtualCameraStream& stream = *it->second;
+  return {stream.getStreamConfig()};
 }
 
 std::shared_ptr<AHardwareBuffer> VirtualCameraSessionContext::fetchHardwareBuffer(
@@ -141,7 +142,8 @@
           streamId);
     return nullptr;
   }
-  return it->second->getHardwareBuffer(bufferId);
+  VirtualCameraStream& stream = *it->second;
+  return stream.getHardwareBuffer(bufferId);
 }
 
 std::shared_ptr<EglFrameBuffer>
@@ -154,7 +156,8 @@
           streamId);
     return nullptr;
   }
-  return it->second->getEglFrameBuffer(eglDisplay, bufferId);
+  VirtualCameraStream& stream = *it->second;
+  return stream.getEglFrameBuffer(eglDisplay, bufferId);
 }
 
 std::set<int> VirtualCameraSessionContext::getStreamIds() const {
diff --git a/services/camera/virtualcamera/VirtualCameraStream.cc b/services/camera/virtualcamera/VirtualCameraStream.cc
index 03da171..fad6cac 100644
--- a/services/camera/virtualcamera/VirtualCameraStream.cc
+++ b/services/camera/virtualcamera/VirtualCameraStream.cc
@@ -26,8 +26,6 @@
 
 #include "EGL/egl.h"
 #include "aidl/android/hardware/camera/device/Stream.h"
-#include "aidl/android/hardware/camera/device/StreamBuffer.h"
-#include "aidl/android/hardware/graphics/common/PixelFormat.h"
 #include "aidlcommonsupport/NativeHandle.h"
 #include "android/hardware_buffer.h"
 #include "cutils/native_handle.h"
@@ -39,52 +37,33 @@
 namespace virtualcamera {
 
 using ::aidl::android::hardware::camera::device::Stream;
-using ::aidl::android::hardware::camera::device::StreamBuffer;
 using ::aidl::android::hardware::common::NativeHandle;
-using ::aidl::android::hardware::graphics::common::PixelFormat;
 
 namespace {
 
-sp<GraphicBuffer> createBlobGraphicBuffer(GraphicBufferMapper& mapper,
-                                          buffer_handle_t bufferHandle) {
-  uint64_t allocationSize;
-  uint64_t usage;
-  uint64_t layerCount;
-  if (mapper.getAllocationSize(bufferHandle, &allocationSize) != NO_ERROR ||
-      mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
-      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
-    ALOGE("Error fetching metadata for the imported BLOB buffer handle.");
-    return nullptr;
-  }
-
-  return sp<GraphicBuffer>::make(
-      bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE,
-      allocationSize, /*height=*/1, static_cast<int>(ui::PixelFormat::BLOB),
-      layerCount, usage, 0);
-}
-
-sp<GraphicBuffer> createYCbCr420GraphicBuffer(GraphicBufferMapper& mapper,
-                                              buffer_handle_t bufferHandle) {
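+// Wraps an imported buffer handle in a GraphicBuffer, reading its dimensions,
+// usage, layer count and requested pixel format back from the mapper.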
+sp<GraphicBuffer> createGraphicBuffer(GraphicBufferMapper& mapper,
+                                      const buffer_handle_t bufferHandle) {
   uint64_t width;
   uint64_t height;
   uint64_t usage;
   uint64_t layerCount;
+  ui::PixelFormat pixelFormat;
   if (mapper.getWidth(bufferHandle, &width) != NO_ERROR ||
       mapper.getHeight(bufferHandle, &height) != NO_ERROR ||
       mapper.getUsage(bufferHandle, &usage) != NO_ERROR ||
-      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR) {
+      mapper.getLayerCount(bufferHandle, &layerCount) != NO_ERROR ||
+      mapper.getPixelFormatRequested(bufferHandle, &pixelFormat) != NO_ERROR) {
     ALOGE("Error fetching metadata for the imported YCbCr420 buffer handle.");
     return nullptr;
   }
 
   return sp<GraphicBuffer>::make(
       bufferHandle, GraphicBuffer::HandleWrapMethod::TAKE_HANDLE, width, height,
-      static_cast<int>(ui::PixelFormat::YCBCR_420_888), /*layers=*/1, usage,
-      width);
+      static_cast<int>(pixelFormat), layerCount, usage, width);
 }
 
 std::shared_ptr<AHardwareBuffer> importBufferInternal(
-    const NativeHandle& aidlHandle, const Stream& streamConfig) {
+    const NativeHandle& aidlHandle) {
   if (aidlHandle.fds.empty()) {
     ALOGE("Empty handle - nothing to import");
     return nullptr;
@@ -103,12 +82,9 @@
     return nullptr;
   }
 
-  sp<GraphicBuffer> buf =
-      streamConfig.format == PixelFormat::BLOB
-          ? createBlobGraphicBuffer(mapper, bufferHandle)
-          : createYCbCr420GraphicBuffer(mapper, bufferHandle);
+  sp<GraphicBuffer> buf = createGraphicBuffer(mapper, bufferHandle);
 
-  if (buf->initCheck() != NO_ERROR) {
+  if (buf == nullptr || buf->initCheck() != NO_ERROR) {
     ALOGE("Imported graphic buffer is not correctly initialized.");
     return nullptr;
   }
@@ -128,7 +104,7 @@
 
 std::shared_ptr<AHardwareBuffer> VirtualCameraStream::importBuffer(
     const ::aidl::android::hardware::camera::device::StreamBuffer& buffer) {
-  auto hwBufferPtr = importBufferInternal(buffer.buffer, mStreamConfig);
+  auto hwBufferPtr = importBufferInternal(buffer.buffer);
   if (hwBufferPtr != nullptr) {
     std::lock_guard<std::mutex> lock(mLock);
     mBuffers.emplace(std::piecewise_construct,
diff --git a/services/camera/virtualcamera/VirtualCameraTestInstance.cc b/services/camera/virtualcamera/VirtualCameraTestInstance.cc
new file mode 100644
index 0000000..ff4a2d8
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraTestInstance.cc
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// #define LOG_NDEBUG 0
+#define LOG_TAG "VirtualCameraTestInstance"
+
+#include "VirtualCameraTestInstance.h"
+
+#include <atomic>
+#include <chrono>
+#include <memory>
+#include <mutex>
+#include <ratio>
+#include <thread>
+
+#include "GLES/gl.h"
+#include "android/binder_auto_utils.h"
+#include "android/native_window.h"
+#include "log/log.h"
+#include "util/EglDisplayContext.h"
+#include "util/EglProgram.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+using ::aidl::android::companion::virtualcamera::Format;
+using ::aidl::android::view::Surface;
+using ::ndk::ScopedAStatus;
+
+namespace {
+
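+// Acquires a reference to the ANativeWindow backing the AIDL Surface and
+// wraps it in a shared_ptr that releases the reference on destruction.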
+std::shared_ptr<ANativeWindow> nativeWindowFromSurface(const Surface& surface) {
+  ANativeWindow* nativeWindow = surface.get();
+  if (nativeWindow != nullptr) {
+    ANativeWindow_acquire(nativeWindow);
+  }
+  return std::shared_ptr<ANativeWindow>(nativeWindow, ANativeWindow_release);
+}
+
+std::chrono::nanoseconds getCurrentTimestamp() {
+  return std::chrono::duration_cast<std::chrono::nanoseconds>(
+      std::chrono::steady_clock::now().time_since_epoch());
+}
+
+}  // namespace
+
+TestPatternRenderer::TestPatternRenderer(
+    std::shared_ptr<ANativeWindow> nativeWindow, int fps)
+    : mFps(fps), mNativeWindow(nativeWindow) {
+}
+
+void TestPatternRenderer::start() {
+  std::lock_guard<std::mutex> lock(mLock);
+  if (mRunning.exchange(true, std::memory_order_relaxed)) {
+    ALOGW("Render thread already started.");
+    return;
+  }
+  mThread =
+      std::thread(&TestPatternRenderer::renderThreadLoop, this, mNativeWindow);
+}
+
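+// Signals the render loop to exit and detaches the thread; the loop keeps a
+// shared_ptr to this instance (see renderThreadLoop), so it can safely
+// outlive this call.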
+void TestPatternRenderer::stop() {
+  std::lock_guard<std::mutex> lock(mLock);
+  if (!mRunning.exchange(false, std::memory_order_relaxed)) {
+    ALOGW("Render thread already stopped.");
+    return;
+  }
+  mThread.detach();
+  mRunning = false;
+}
+
+void TestPatternRenderer::renderThreadLoop(
+    std::shared_ptr<ANativeWindow> nativeWindow) {
+  // Prevent destruction of this instance until the thread terminates.
+  std::shared_ptr<TestPatternRenderer> thiz = shared_from_this();
+
+  ALOGV("Starting test client render loop");
+
+  EglDisplayContext eglDisplayContext(nativeWindow);
+  EglTestPatternProgram testPatternProgram;
+
+  const std::chrono::nanoseconds frameDuration(
+      static_cast<uint64_t>(1e9 / mFps));
+
+  std::chrono::nanoseconds lastFrameTs(0);
+  int frameNumber = 0;
+  while (mRunning) {
+    // Wait for appropriate amount of time to meet configured FPS.
+    std::chrono::nanoseconds ts = getCurrentTimestamp();
+    std::chrono::nanoseconds currentDuration = ts - lastFrameTs;
+    if (currentDuration < frameDuration) {
+      std::this_thread::sleep_for(frameDuration - currentDuration);
+    }
+
+    // Render the test pattern and update timestamp.
+    testPatternProgram.draw(ts);
+    eglDisplayContext.swapBuffers();
+    lastFrameTs = getCurrentTimestamp();
+  }
+
+  ALOGV("Terminating test client render loop");
+}
+
+VirtualCameraTestInstance::VirtualCameraTestInstance(const int fps)
+    : mFps(fps) {
+}
+
+ScopedAStatus VirtualCameraTestInstance::onStreamConfigured(
+    const int32_t streamId, const Surface& surface, const int32_t width,
+    const int32_t height, const Format pixelFormat) {
+  ALOGV("%s: streamId %d, %dx%d pixFmt=%s", __func__, streamId, width, height,
+        toString(pixelFormat).c_str());
+
+  auto renderer = std::make_shared<TestPatternRenderer>(
+      nativeWindowFromSurface(surface), mFps);
+
+  std::lock_guard<std::mutex> lock(mLock);
+  if (mInputRenderers.try_emplace(streamId, renderer).second) {
+    renderer->start();
+  } else {
+    ALOGE(
+        "%s: Input stream with id %d is already active, ignoring "
+        "onStreamConfigured call",
+        __func__, streamId);
+  }
+
+  return ScopedAStatus::ok();
+}
+
+ScopedAStatus VirtualCameraTestInstance::onProcessCaptureRequest(
+    const int32_t /*in_streamId*/, const int32_t /*in_frameId*/) {
+  return ScopedAStatus::ok();
+}
+
+ScopedAStatus VirtualCameraTestInstance::onStreamClosed(const int32_t streamId) {
+  ALOGV("%s: streamId %d", __func__, streamId);
+
+  std::shared_ptr<TestPatternRenderer> renderer;
+  {
+    std::lock_guard<std::mutex> lock(mLock);
+    auto it = mInputRenderers.find(streamId);
+    if (it != mInputRenderers.end()) {
+      renderer = std::move(it->second);
+      mInputRenderers.erase(it);
+    }
+  }
+  if (renderer != nullptr) {
+    renderer->stop();
+  }
+  return ScopedAStatus::ok();
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
diff --git a/services/camera/virtualcamera/VirtualCameraTestInstance.h b/services/camera/virtualcamera/VirtualCameraTestInstance.h
new file mode 100644
index 0000000..c130645
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraTestInstance.h
@@ -0,0 +1,93 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
+
+#include <atomic>
+#include <map>
+#include <memory>
+#include <thread>
+
+#include "aidl/android/companion/virtualcamera/BnVirtualCameraCallback.h"
+#include "android/native_window.h"
+#include "utils/Mutex.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Wraps a render loop running in a dedicated thread, rendering a test pattern
+// to the provided Surface (a.k.a. native window) at the configured FPS.
+class TestPatternRenderer
+    : public std::enable_shared_from_this<TestPatternRenderer> {
+ public:
+  TestPatternRenderer(std::shared_ptr<ANativeWindow> nativeWindow, int fps);
+
+  // Start rendering.
+  void start() EXCLUDES(mLock);
+
+  // Stop rendering.
+  // Call returns immediately; the render thread might take some time (1 frame)
+  // to finish rendering and terminate the thread.
+  void stop() EXCLUDES(mLock);
+
+ private:
+  // Render thread entry point.
+  void renderThreadLoop(std::shared_ptr<ANativeWindow> nativeWindow);
+
+  const int mFps;
+
+  std::shared_ptr<ANativeWindow> mNativeWindow;
+
+  std::mutex mLock;
+  std::atomic_bool mRunning;
+  std::thread mThread GUARDED_BY(mLock);
+};
+
+// VirtualCamera callback implementation for test camera.
+//
+// For every configure call, starts rendering a test pattern on the provided surface.
+class VirtualCameraTestInstance
+    : public aidl::android::companion::virtualcamera::BnVirtualCameraCallback {
+ public:
+  explicit VirtualCameraTestInstance(int fps = 30);
+
+  ::ndk::ScopedAStatus onStreamConfigured(
+      int32_t streamId, const ::aidl::android::view::Surface& surface,
+      int32_t width, int32_t height,
+      ::aidl::android::companion::virtualcamera::Format pixelFormat) override
+      EXCLUDES(mLock);
+
+  ::ndk::ScopedAStatus onProcessCaptureRequest(int32_t in_streamId,
+                                               int32_t in_frameId) override;
+
+  ::ndk::ScopedAStatus onStreamClosed(int32_t streamId) override EXCLUDES(mLock);
+
+ private:
+  const int mFps;
+
+  std::mutex mLock;
+  // Map maintaining streamId -> TestPatternRenderer mapping for active
+  // input streams.
+  std::map<int, std::shared_ptr<TestPatternRenderer>> mInputRenderers
+      GUARDED_BY(mLock);
+};
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERATESTINSTANCE_H
diff --git a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
index 1bd99be..2f1e2a9 100644
--- a/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
+++ b/services/camera/virtualcamera/aidl/android/companion/virtualcamera/IVirtualCameraService.aidl
@@ -41,5 +41,5 @@
      * Returns the camera id for a given binder token. Note that this id corresponds to the id of
      * the camera device in the camera framework.
      */
-    int getCameraId(in IBinder token);
+    @utf8InCpp String getCameraId(in IBinder token);
 }
diff --git a/services/camera/virtualcamera/main.cc b/services/camera/virtualcamera/main.cc
index 43b0219..3db9d9c 100644
--- a/services/camera/virtualcamera/main.cc
+++ b/services/camera/virtualcamera/main.cc
@@ -38,7 +38,7 @@
 }  // namespace
 
 int main() {
-  ALOGI("CameraProvider: virtual webcam service is starting.");
+  ALOGI("virtual_camera service is starting.");
 
   ABinderProcess_setThreadPoolMaxThreadCount(HWBINDER_THREAD_COUNT);
 
diff --git a/services/camera/virtualcamera/tests/EglUtilTest.cc b/services/camera/virtualcamera/tests/EglUtilTest.cc
index 589e312..813be75 100644
--- a/services/camera/virtualcamera/tests/EglUtilTest.cc
+++ b/services/camera/virtualcamera/tests/EglUtilTest.cc
@@ -55,6 +55,11 @@
 };
 
 TEST_F(EglTest, EglTestPatternProgramSuccessfulInit) {
+  if (!isGlExtensionSupported(kGlExtYuvTarget)) {
+    GTEST_SKIP() << "Skipping test because of missing required GL extension "
+                 << kGlExtYuvTarget;
+  }
+
   EglTestPatternProgram eglTestPatternProgram;
 
   // Verify the shaders compiled and linked successfully.
diff --git a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
index 3fe7c11..32cd23f 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraDeviceTest.cc
@@ -54,11 +54,9 @@
 using metadata_stream_t =
     camera_metadata_enum_android_scaler_available_stream_configurations_t;
 
-constexpr int kCameraId = 42;
+constexpr char kCameraId[] = "42";
 constexpr int kQvgaWidth = 320;
 constexpr int kQvgaHeight = 240;
-constexpr int k360pWidth = 640;
-constexpr int k360pHeight = 360;
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kHdWidth = 1280;
diff --git a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
index f1b2a92..d4bc6de 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraProviderTest.cc
@@ -50,13 +50,12 @@
 using ::testing::Not;
 using ::testing::Return;
 
+constexpr char kCameraId[] = "42";
 constexpr int kVgaWidth = 640;
 constexpr int kVgaHeight = 480;
 constexpr int kMaxFps = 30;
-constexpr int kCameraId = 9999;
 constexpr int kDefaultDeviceId = 0;
-constexpr char kVirtualCameraNameRegex[] =
-    "device@[0-9]+\\.[0-9]+/virtual/[0-9]+";
+constexpr char kVirtualCameraNameRegex[] = "device@[0-9]+\\.[0-9]+/virtual/.+";
 
 class MockCameraProviderCallback : public BnCameraProviderCallback {
  public:
diff --git a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
index 5927b05..719f64d 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraServiceTest.cc
@@ -15,9 +15,11 @@
  */
 
 #include <algorithm>
+#include <cstdint>
 #include <cstdio>
 #include <iterator>
 #include <memory>
+#include <optional>
 #include <regex>
 
 #include "VirtualCameraService.h"
@@ -75,6 +77,17 @@
 
 const VirtualCameraConfiguration kEmptyVirtualCameraConfiguration;
 
+class MockVirtualCameraCallback : public BnVirtualCameraCallback {
+ public:
+  MOCK_METHOD(ndk::ScopedAStatus, onStreamConfigured,
+              (int32_t, const ::aidl::android::view::Surface&, int, int,
+               ::aidl::android::companion::virtualcamera::Format pixelFormat),
+              (override));
+  MOCK_METHOD(ndk::ScopedAStatus, onProcessCaptureRequest, (int32_t, int32_t),
+              (override));
+  MOCK_METHOD(ndk::ScopedAStatus, onStreamClosed, (int32_t), (override));
+};
+
 VirtualCameraConfiguration createConfiguration(const int width, const int height,
                                                const Format format,
                                                const int maxFps) {
@@ -85,6 +98,8 @@
                                                   .maxFps = maxFps});
   configuration.sensorOrientation = kSensorOrientation;
   configuration.lensFacing = kLensFacing;
+  configuration.virtualCameraCallback =
+      ndk::SharedRefBase::make<MockVirtualCameraCallback>();
   return configuration;
 }
 
@@ -177,6 +192,16 @@
     return getLensFacing(metadata);
   }
 
+  std::optional<int32_t> getCameraSensorOrientation(const std::string& id) {
+    std::shared_ptr<VirtualCameraDevice> camera = mCameraProvider->getCamera(id);
+    if (camera == nullptr) {
+      return std::nullopt;
+    }
+    CameraMetadata metadata;
+    camera->getCameraCharacteristics(&metadata);
+    return getSensorOrientation(metadata);
+  }
+
  protected:
   std::shared_ptr<VirtualCameraService> mCameraService;
   std::shared_ptr<VirtualCameraProvider> mCameraProvider;
@@ -247,6 +272,24 @@
   EXPECT_THAT(getCameraIds(), IsEmpty());
 }
 
+TEST_F(VirtualCameraServiceTest,
+       ConfigurationWithoutVirtualCameraCallbackFails) {
+  sp<BBinder> token = sp<BBinder>::make();
+  ndk::SpAIBinder ndkToken(AIBinder_fromPlatformBinder(token));
+  bool aidlRet;
+
+  VirtualCameraConfiguration config =
+      createConfiguration(kVgaWidth, kVgaHeight, Format::RGBA_8888, kMaxFps);
+  config.virtualCameraCallback = nullptr;
+
+  ASSERT_FALSE(mCameraService
+                   ->registerCamera(ndkToken, config, kDefaultDeviceId, &aidlRet)
+                   .isOk());
+
+  EXPECT_FALSE(aidlRet);
+  EXPECT_THAT(getCameraIds(), IsEmpty());
+}
+
 TEST_F(VirtualCameraServiceTest, ConfigurationWithUnsupportedPixelFormatFails) {
   bool aidlRet;
 
@@ -358,7 +401,7 @@
 }
 
 TEST_F(VirtualCameraServiceTest, GetIdWithoutPermissionFails) {
-  int32_t aidlRet;
+  std::string aidlRet;
   EXPECT_CALL(mMockPermissionsProxy,
               checkCallingPermission(kCreateVirtualDevicePermissions))
       .WillOnce(Return(false));
@@ -413,11 +456,13 @@
 }
 
 TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithId) {
-  EXPECT_THAT(execute_shell_command("enable_test_camera --camera_id=12345"),
-              Eq(NO_ERROR));
+  EXPECT_THAT(
+      execute_shell_command("enable_test_camera --camera_id=hello12345"),
+      Eq(NO_ERROR));
 
   std::vector<std::string> cameraIdsAfterEnable = getCameraIds();
-  EXPECT_THAT(cameraIdsAfterEnable, ElementsAre("device@1.1/virtual/12345"));
+  EXPECT_THAT(cameraIdsAfterEnable,
+              ElementsAre("device@1.1/virtual/hello12345"));
 
   EXPECT_THAT(execute_shell_command("disable_test_camera"), Eq(NO_ERROR));
 
@@ -426,9 +471,8 @@
 }
 
 TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidId) {
-  EXPECT_THAT(
-      execute_shell_command("enable_test_camera --camera_id=NotNumericalId"),
-      Eq(STATUS_BAD_VALUE));
+  EXPECT_THAT(execute_shell_command("enable_test_camera --camera_id="),
+              Eq(STATUS_BAD_VALUE));
 }
 
 TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithUnknownCommand) {
@@ -456,6 +500,41 @@
               Eq(STATUS_BAD_VALUE));
 }
 
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInputFps) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=15"),
+              Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithInvalidInputFps) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=1001"),
+              Eq(STATUS_BAD_VALUE));
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=0"),
+              Eq(STATUS_BAD_VALUE));
+  EXPECT_THAT(execute_shell_command("enable_test_camera --input_fps=foo"),
+              Eq(STATUS_BAD_VALUE));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithSensorOrientation90) {
+  EXPECT_THAT(
+      execute_shell_command("enable_test_camera --sensor_orientation=90"),
+      Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+  EXPECT_THAT(getCameraSensorOrientation(cameraIds[0]), Optional(Eq(90)));
+}
+
+TEST_F(VirtualCameraServiceTest, TestCameraShellCmdWithSensorOrientationNoArgs) {
+  EXPECT_THAT(execute_shell_command("enable_test_camera"), Eq(NO_ERROR));
+
+  std::vector<std::string> cameraIds = getCameraIds();
+  ASSERT_THAT(cameraIds, SizeIs(1));
+  EXPECT_THAT(getCameraSensorOrientation(cameraIds[0]), Optional(Eq(0)));
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
index 671e031..a9eb413 100644
--- a/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
+++ b/services/camera/virtualcamera/tests/VirtualCameraSessionTest.cc
@@ -37,6 +37,7 @@
 namespace virtualcamera {
 namespace {
 
+constexpr char kCameraId[] = "42";
 constexpr int kQvgaWidth = 320;
 constexpr int kQvgaHeight = 240;
 constexpr int kVgaWidth = 640;
@@ -46,7 +47,6 @@
 constexpr int kMaxFps = 30;
 constexpr int kStreamId = 0;
 constexpr int kSecondStreamId = 1;
-constexpr int kCameraId = 42;
 constexpr int kDefaultDeviceId = 0;
 
 using ::aidl::android::companion::virtualcamera::BnVirtualCameraCallback;
@@ -379,6 +379,92 @@
           .isOk());
 }
 
+TEST_F(VirtualCameraSessionInputChoiceTest, reconfigureSwitchesInputStream) {
+  // Create camera configured to support SVGA YUV input and RGB QVGA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // First configure QVGA stream.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kQvgaWidth, kQvgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect QVGA input configuration to be chosen.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kQvgaWidth, kQvgaHeight,
+                                 Format::RGBA_8888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+
+  // Reconfigure with additional VGA stream.
+  streamConfiguration.streams.push_back(
+      createStream(kStreamId + 1, kVgaWidth, kVgaHeight,
+                   PixelFormat::IMPLEMENTATION_DEFINED));
+
+  // Expect original surface to be discarded.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback, onStreamClosed(kStreamId));
+
+  // Expect SVGA input configuration to be chosen.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId + 1, _, kSvgaWidth, kSvgaHeight,
+                                 Format::YUV_420_888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
+TEST_F(VirtualCameraSessionInputChoiceTest,
+       reconfigureKeepsInputStreamIfUnchanged) {
+  // Create camera configured to support SVGA YUV input and RGB QVGA input.
+  auto virtualCameraSession = createSession(
+      {SupportedStreamConfiguration{.width = kSvgaWidth,
+                                    .height = kSvgaHeight,
+                                    .pixelFormat = Format::YUV_420_888,
+                                    .maxFps = kMaxFps},
+       SupportedStreamConfiguration{.width = kQvgaWidth,
+                                    .height = kQvgaHeight,
+                                    .pixelFormat = Format::RGBA_8888,
+                                    .maxFps = kMaxFps}});
+
+  // First configure SVGA stream.
+  StreamConfiguration streamConfiguration;
+  streamConfiguration.streams = {createStream(
+      kStreamId, kSvgaWidth, kSvgaHeight, PixelFormat::IMPLEMENTATION_DEFINED)};
+  std::vector<HalStream> halStreams;
+
+  // Expect SVGA input configuration to be chosen.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback,
+              onStreamConfigured(kStreamId, _, kSvgaWidth, kSvgaHeight,
+                                 Format::YUV_420_888));
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+
+  // Reconfigure with two VGA streams. Because we only allow downscaling,
+  // this will be matched to SVGA input resolution.
+  streamConfiguration.streams = {
+      createStream(kStreamId + 1, kVgaWidth, kVgaHeight,
+                   PixelFormat::IMPLEMENTATION_DEFINED),
+      createStream(kStreamId + 2, kVgaWidth, kVgaHeight,
+                   PixelFormat::IMPLEMENTATION_DEFINED)};
+
+  // Expect the onStreamConfigured callback not to be invoked, since the
+  // original Surface is still best fit for current output streams.
+  EXPECT_CALL(*mMockVirtualCameraClientCallback, onStreamConfigured).Times(0);
+  EXPECT_TRUE(
+      virtualCameraSession->configureStreams(streamConfiguration, &halStreams)
+          .isOk());
+}
+
 }  // namespace
 }  // namespace virtualcamera
 }  // namespace companion
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.cc b/services/camera/virtualcamera/util/EglDisplayContext.cc
index 6d343a2..ccd0d71 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.cc
+++ b/services/camera/virtualcamera/util/EglDisplayContext.cc
@@ -30,8 +30,9 @@
 namespace companion {
 namespace virtualcamera {
 
-EglDisplayContext::EglDisplayContext()
+EglDisplayContext::EglDisplayContext(std::shared_ptr<ANativeWindow> nativeWindow)
     : mEglDisplay(EGL_NO_DISPLAY),
+      mEglSurface(EGL_NO_SURFACE),
       mEglContext(EGL_NO_CONTEXT),
       mEglConfig(nullptr) {
   EGLBoolean result;
@@ -52,8 +53,12 @@
 
   EGLint numConfigs = 0;
   EGLint configAttribs[] = {
-      EGL_SURFACE_TYPE, EGL_PBUFFER_BIT, EGL_RENDERABLE_TYPE,
-      EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8, EGL_BLUE_SIZE, 8,
+      EGL_SURFACE_TYPE,
+      nativeWindow == nullptr
+          ? EGL_PBUFFER_BIT  // Render into individual AHardwareBuffer
+          : EGL_WINDOW_BIT,  // Render into Surface (ANativeWindow)
+      EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE,
+      8, EGL_BLUE_SIZE, 8,
       // no alpha
       EGL_NONE};
 
@@ -72,6 +77,17 @@
     return;
   }
 
+  if (nativeWindow != nullptr) {
+    mEglSurface = eglCreateWindowSurface(mEglDisplay, mEglConfig,
+                                         nativeWindow.get(), NULL);
+    if (mEglSurface == EGL_NO_SURFACE) {
+      ALOGE("eglCreateWindowSurface error: %#x", eglGetError());
+    }
+  }
+
+  // EGL is a big state machine. Now that we have a configuration ready, we set
+  // this state machine to that configuration (we make it the "current"
+  // configuration).
   if (!makeCurrent()) {
     ALOGE(
         "Failed to set newly initialized EGLContext and EGLDisplay connection "
@@ -82,13 +98,16 @@
 }
 
 EglDisplayContext::~EglDisplayContext() {
-  if (mEglDisplay != EGL_NO_DISPLAY) {
-    eglTerminate(mEglDisplay);
+  eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
+  if (mEglSurface != EGL_NO_SURFACE) {
+    eglDestroySurface(mEglDisplay, mEglSurface);
   }
   if (mEglContext != EGL_NO_CONTEXT) {
     eglDestroyContext(mEglDisplay, mEglContext);
   }
-  eglReleaseThread();
+  if (mEglDisplay != EGL_NO_DISPLAY) {
+    eglTerminate(mEglDisplay);
+  }
 }
 
 EGLDisplay EglDisplayContext::getEglDisplay() const {
@@ -99,8 +118,14 @@
   return mEglContext != EGL_NO_CONTEXT && mEglDisplay != EGL_NO_DISPLAY;
 }
 
+void EglDisplayContext::swapBuffers() const {
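+  // Only meaningful when rendering into a window surface; pbuffer-backed
+  // contexts have no EGL surface to swap.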
+  if (mEglSurface != EGL_NO_SURFACE) {
+    eglSwapBuffers(mEglDisplay, mEglSurface);
+  }
+}
+
 bool EglDisplayContext::makeCurrent() {
-  if (!eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, mEglContext)) {
+  if (!eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
     ALOGE("eglMakeCurrent failed: %#x", eglGetError());
     return false;
   }
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.h b/services/camera/virtualcamera/util/EglDisplayContext.h
index 402ca3c..6dc3080 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.h
+++ b/services/camera/virtualcamera/util/EglDisplayContext.h
@@ -17,7 +17,10 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLDISPLAYCONTEXT_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLDISPLAYCONTEXT_H
 
+#include <memory>
+
 #include "EGL/egl.h"
+#include "system/window.h"
 
 namespace android {
 namespace companion {
@@ -30,7 +33,7 @@
 // out of scope.
 class EglDisplayContext {
  public:
-  EglDisplayContext();
+  EglDisplayContext(std::shared_ptr<ANativeWindow> nativeWindow = nullptr);
   ~EglDisplayContext();
 
   // Sets EGLDisplay & EGLContext for current thread.
@@ -44,8 +47,13 @@
   // EGLDisplay & EGLContext.
   bool isInitialized() const;
 
+  void swapBuffers() const;
+
  private:
+  std::shared_ptr<ANativeWindow> mNativeWindow;
+
   EGLDisplay mEglDisplay;
+  EGLSurface mEglSurface;
   EGLContext mEglContext;
   EGLConfig mEglConfig;
 };
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 7554a67..eda4169 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -35,19 +35,28 @@
 
 constexpr char kGlExtYuvTarget[] = "GL_EXT_YUV_target";
 
-constexpr char kIdentityVertexShader[] = R"(
-    attribute vec4 vPosition;
+constexpr char kJuliaFractalVertexShader[] = R"(#version 300 es
+    in vec4 aPosition;
+    in vec2 aTextureCoord;
+    out vec2 vFractalCoord;
+    out vec2 vUVCoord;
     void main() {
-      gl_Position = vPosition;
+      gl_Position = aPosition;
+      vUVCoord = aTextureCoord;
+      vFractalCoord = vec2(aTextureCoord.x - 0.5, aTextureCoord.y - 0.5) * 4.0;
     })";
 
-constexpr char kJuliaFractalFragmentShader[] = R"(
+constexpr char kJuliaFractalFragmentShader[] = R"(#version 300 es
+    #extension GL_EXT_YUV_target : require
     precision mediump float;
-    uniform vec2 uResolution;
-    uniform vec2 uC;
-    uniform vec2 uUV;
+
     const float kIter = 64.0;
 
+    in vec2 vFractalCoord;
+    in vec2 vUVCoord;
+    out vec4 fragColor;
+    uniform vec2 uC;
+
     vec2 imSq(vec2 n){
       return vec2(pow(n.x,2.0)-pow(n.y,2.0), 2.0*n.x*n.y);
     }
@@ -62,9 +71,8 @@
     }
 
     void main() {
-      vec2 uv = vec2(gl_FragCoord.x / uResolution.x - 0.5, gl_FragCoord.y / uResolution.y - 0.5);
-      float juliaVal = julia(uv * 4.0, uC);
-      gl_FragColor = vec4( juliaVal,uUV.x,uUV.y,0.0);
+      float juliaVal = julia(vFractalCoord, uC);
+      fragColor = vec4(yuv_2_rgb(vec3(juliaVal, vUVCoord.x, vUVCoord.y), itu_601_full_range), 0.0);
     })";
 
 constexpr char kExternalTextureVertexShader[] = R"(#version 300 es
@@ -88,8 +96,9 @@
       fragColor = texture(uTexture, vTextureCoord);
     })";
 
+// Shader to render an RGBA texture into a YUV buffer.
 constexpr char kExternalRgbaTextureFragmentShader[] = R"(#version 300 es
-    #extension GL_OES_EGL_image_external : require
+    #extension GL_OES_EGL_image_external_essl3 : require
     #extension GL_EXT_YUV_target : require
     precision mediump float;
     in vec2 vTextureCoord;
@@ -200,47 +209,46 @@
 }
 
 EglTestPatternProgram::EglTestPatternProgram() {
-  if (initialize(kIdentityVertexShader, kJuliaFractalFragmentShader)) {
+  if (initialize(kJuliaFractalVertexShader, kJuliaFractalFragmentShader)) {
     ALOGV("Successfully initialized EGL shaders for test pattern program.");
   } else {
     ALOGE("Test pattern EGL shader program initialization failed.");
   }
+
+  mCHandle = glGetUniformLocation(mProgram, "uC");
+  mPositionHandle = glGetAttribLocation(mProgram, "aPosition");
+  mTextureCoordHandle = glGetAttribLocation(mProgram, "aTextureCoord");
+
+  // Pass vertex array to draw.
+  glEnableVertexAttribArray(mPositionHandle);
+  // Prepare the triangle coordinate data.
+  glVertexAttribPointer(mPositionHandle, kCoordsPerVertex, GL_FLOAT, false,
+                        kSquareCoords.size(), kSquareCoords.data());
+
+  glEnableVertexAttribArray(mTextureCoordHandle);
+  glVertexAttribPointer(mTextureCoordHandle, 2, GL_FLOAT, false,
+                        kTextureCoords.size(), kTextureCoords.data());
 }
 
-bool EglTestPatternProgram::draw(int width, int height, int frameNumber) {
-  glViewport(0, 0, static_cast<GLsizei>(width), static_cast<GLsizei>(height));
-  checkEglError("glViewport");
+EglTestPatternProgram::~EglTestPatternProgram() {
+  if (mPositionHandle != -1) {
+    glDisableVertexAttribArray(mPositionHandle);
+  }
+  if (mTextureCoordHandle != -1) {
+    glDisableVertexAttribArray(mTextureCoordHandle);
+  }
+}
 
+bool EglTestPatternProgram::draw(const std::chrono::nanoseconds timestamp) {
   // Load compiled shader.
   glUseProgram(mProgram);
   checkEglError("glUseProgram");
 
-  // Compute point in complex plane corresponding to fractal for this frame number.
-  float time = float(frameNumber) / 120.0f;
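+  // Convert the buffer timestamp to seconds and slow the fractal animation
+  // down by a factor of 10.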
+  float time = float(timestamp.count() / 1e9) / 10;
   const std::complex<float> c(std::sin(time) * 0.78f, std::cos(time) * 0.78f);
 
-  // Pass uniform values to the shader.
-  int resolutionHandle = glGetUniformLocation(mProgram, "uResolution");
-  checkEglError("glGetUniformLocation -> uResolution");
-  glUniform2f(resolutionHandle, static_cast<float>(width),
-              static_cast<float>(height));
-  checkEglError("glUniform2f -> uResolution");
-
   // Pass "C" constant value determining the Julia set to the shader.
-  int cHandle = glGetUniformLocation(mProgram, "uC");
-  glUniform2f(cHandle, c.imag(), c.real());
-
-  // Pass chroma value to the shader.
-  int uvHandle = glGetUniformLocation(mProgram, "uUV");
-  glUniform2f(uvHandle, (c.imag() + 1.f) / 2.f, (c.real() + 1.f) / 2.f);
-
-  // Pass vertex array to draw.
-  int positionHandle = glGetAttribLocation(mProgram, "vPosition");
-  glEnableVertexAttribArray(positionHandle);
-
-  // Prepare the triangle coordinate data.
-  glVertexAttribPointer(positionHandle, kCoordsPerVertex, GL_FLOAT, false,
-                        kSquareCoords.size(), kSquareCoords.data());
+  glUniform2f(mCHandle, c.imag(), c.real());
 
   // Draw triangle strip forming a square filling the viewport.
   glDrawElements(GL_TRIANGLES, kDrawOrder.size(), GL_UNSIGNED_BYTE,
diff --git a/services/camera/virtualcamera/util/EglProgram.h b/services/camera/virtualcamera/util/EglProgram.h
index c695cbb..cf93157 100644
--- a/services/camera/virtualcamera/util/EglProgram.h
+++ b/services/camera/virtualcamera/util/EglProgram.h
@@ -18,6 +18,7 @@
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLPROGRAM_H
 
 #include <array>
+#include <chrono>
 
 #include "GLES/gl.h"
 
@@ -45,8 +46,14 @@
 class EglTestPatternProgram : public EglProgram {
  public:
   EglTestPatternProgram();
+  virtual ~EglTestPatternProgram();
 
-  bool draw(int width, int height, int frameNumber);
+  bool draw(std::chrono::nanoseconds timestamp);
+
+ private:
+  int mPositionHandle = -1;
+  int mTextureCoordHandle = -1;
+  int mCHandle = -1;
 };
 
 // Shader program to draw texture.
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index 9f26e19..be36ec4 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -15,21 +15,32 @@
  */
 
 // #define LOG_NDEBUG 0
+#include <chrono>
+
+#include "utils/Timers.h"
 #define LOG_TAG "EglSurfaceTexture"
 
+#include <GLES/gl.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/BufferQueue.h>
+#include <gui/GLConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <hardware/gralloc.h>
+
 #include <cstdint>
 
 #include "EglSurfaceTexture.h"
 #include "EglUtil.h"
-#include "GLES/gl.h"
-#include "gui/BufferQueue.h"
-#include "gui/GLConsumer.h"
-#include "gui/IGraphicBufferProducer.h"
-#include "hardware/gralloc.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
+namespace {
+
+// Maximum number of buffers the producer can dequeue without blocking.
+constexpr int kBufferProducerMaxDequeueBufferCount = 64;
+
+}  // namespace
 
 EglSurfaceTexture::EglSurfaceTexture(const uint32_t width, const uint32_t height)
     : mWidth(width), mHeight(height) {
@@ -38,7 +49,23 @@
     ALOGE("Failed to generate texture");
     return;
   }
+
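+  // When the libgui flag WB_CONSUMER_BASE_OWNS_BQ is enabled, the GLConsumer
+  // owns its BufferQueue, so the Surface is obtained from the consumer instead
+  // of a manually created producer/consumer pair.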
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+  mGlConsumer = sp<GLConsumer>::make(mTextureId, GLConsumer::TEXTURE_EXTERNAL,
+                                     false, false);
+  mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
+  mGlConsumer->setDefaultBufferSize(mWidth, mHeight);
+  mGlConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_TEXTURE);
+  mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
+
+  mSurface = mGlConsumer->getSurface();
+  mSurface->setMaxDequeuedBufferCount(kBufferProducerMaxDequeueBufferCount);
+#else
   BufferQueue::createBufferQueue(&mBufferProducer, &mBufferConsumer);
+  // Set the producer's maximum dequeued buffer count to its largest value to
+  // prevent blocking when dequeuing input buffers.
+  mBufferProducer->setMaxDequeuedBufferCount(
+      kBufferProducerMaxDequeueBufferCount);
   mGlConsumer = sp<GLConsumer>::make(
       mBufferConsumer, mTextureId, GLConsumer::TEXTURE_EXTERNAL, false, false);
   mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
@@ -47,6 +74,7 @@
   mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
 
   mSurface = sp<Surface>::make(mBufferProducer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 }
 
 EglSurfaceTexture::~EglSurfaceTexture() {
@@ -63,8 +91,41 @@
   return mGlConsumer->getCurrentBuffer();
 }
 
+void EglSurfaceTexture::setFrameAvailableListener(
+    const wp<ConsumerBase::FrameAvailableListener>& listener) {
+  mGlConsumer->setFrameAvailableListener(listener);
+}
+
+bool EglSurfaceTexture::waitForNextFrame(const std::chrono::nanoseconds timeout) {
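+  // Delegates to Surface::waitForNextFrame with the frame number of the most
+  // recently consumed frame.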
+  return mSurface->waitForNextFrame(mGlConsumer->getFrameNumber(),
+                                    static_cast<nsecs_t>(timeout.count()));
+}
+
+std::chrono::nanoseconds EglSurfaceTexture::getTimestamp() {
+  return std::chrono::nanoseconds(mGlConsumer->getTimestamp());
+}
+
 GLuint EglSurfaceTexture::updateTexture() {
-  mGlConsumer->updateTexImage();
+  int previousFrameId;
+  int framesAdvance = 0;
+  // Consume buffers one at a time.
+  // Contrary to the code comments in GLConsumer, GLConsumer acquires the
+  // next queued buffer (not the most recently queued buffer).
+  while (true) {
+    previousFrameId = mGlConsumer->getFrameNumber();
+    mGlConsumer->updateTexImage();
+    int currentFrameId = mGlConsumer->getFrameNumber();
+    if (previousFrameId == currentFrameId) {
+      // Frame number didn't change after updating the texture,
+      // this means we're at the end of the queue and current attached
+      // buffer is the most recent buffer.
+      break;
+    }
+
+    framesAdvance++;
+    previousFrameId = currentFrameId;
+  }
+  ALOGV("%s: Advanced %d frames", __func__, framesAdvance);
   return mTextureId;
 }
 
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index faad7c4..c1f1169 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -17,16 +17,21 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 
-#include <cstdint>
+#include <GLES/gl.h>
+#include <gui/ConsumerBase.h>
+#include <gui/Surface.h>
+#include <utils/RefBase.h>
 
-#include "GLES/gl.h"
-#include "gui/Surface.h"
-#include "utils/RefBase.h"
+#include <chrono>
+#include <cstdint>
 
 namespace android {
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 class IGraphicBufferProducer;
 class IGraphicBufferConsumer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
 class GLConsumer;
 
 namespace companion {
@@ -51,6 +56,15 @@
   // Get height of surface / texture.
   uint32_t getHeight() const;
 
+  // Wait until the next frame is available in the surface,
+  // or until the timeout expires.
+  //
+  // Returns false on timeout, true if a new frame was received before timeout.
+  bool waitForNextFrame(std::chrono::nanoseconds timeout);
+
+  void setFrameAvailableListener(
+      const wp<ConsumerBase::FrameAvailableListener>& listener);
+
   // Update the texture with the most recently submitted buffer.
   // Must be called on a thread with an EGL context.
   //
@@ -68,9 +82,15 @@
   // See SurfaceTexture.getTransformMatrix for more details.
   std::array<float, 16> getTransformMatrix();
 
+  // Retrieves the timestamp associated with the texture image
+  // set by the most recent call to updateTexture.
+  std::chrono::nanoseconds getTimestamp();
+
  private:
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
   sp<IGraphicBufferProducer> mBufferProducer;
   sp<IGraphicBufferConsumer> mBufferConsumer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
   sp<GLConsumer> mGlConsumer;
   sp<Surface> mSurface;
   GLuint mTextureId;
diff --git a/services/camera/virtualcamera/util/EglUtil.cc b/services/camera/virtualcamera/util/EglUtil.cc
index 481d8f0..1c685f1 100644
--- a/services/camera/virtualcamera/util/EglUtil.cc
+++ b/services/camera/virtualcamera/util/EglUtil.cc
@@ -20,6 +20,7 @@
 
 #include <cstring>
 
+#include "EglDisplayContext.h"
 #include "GLES/gl.h"
 #include "log/log.h"
 
@@ -27,6 +28,9 @@
 namespace companion {
 namespace virtualcamera {
 
+// The maximum supported texture size is guaranteed to be at least 2048x2048.
+constexpr int kDefaultMaxTextureSize = 2048;
+
 bool checkEglError(const char* operation) {
   GLenum err = glGetError();
   if (err == GL_NO_ERROR) {
@@ -45,6 +49,20 @@
   return strstr(extensions, extension) != nullptr;
 }
 
+int getMaximumTextureSize() {
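+  // Query GL_MAX_TEXTURE_SIZE once and cache it in a function-local static;
+  // a temporary EglDisplayContext is made current so the GL query is valid.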
+  static const int kMaxTextureSize = [] {
+    EglDisplayContext displayContext;
+    displayContext.makeCurrent();
+    int maxTextureSize = -1;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
+    return maxTextureSize;
+  }();
+  if (kMaxTextureSize <= 0) {
+    return kDefaultMaxTextureSize;
+  }
+  return kMaxTextureSize;
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/EglUtil.h b/services/camera/virtualcamera/util/EglUtil.h
index 71640e3..f339b4e 100644
--- a/services/camera/virtualcamera/util/EglUtil.h
+++ b/services/camera/virtualcamera/util/EglUtil.h
@@ -27,6 +27,8 @@
 // Returns true if the GL extension is supported, false otherwise.
 bool isGlExtensionSupported(const char* extension);
 
+int getMaximumTextureSize();
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/JpegUtil.h b/services/camera/virtualcamera/util/JpegUtil.h
index 184dd56..0a8df90 100644
--- a/services/camera/virtualcamera/util/JpegUtil.h
+++ b/services/camera/virtualcamera/util/JpegUtil.h
@@ -18,6 +18,7 @@
 #define ANDROID_COMPANION_VIRTUALCAMERA_JPEGUTIL_H
 
 #include <optional>
+#include <vector>
 
 #include "android/hardware_buffer.h"
 #include "util/Util.h"
diff --git a/services/camera/virtualcamera/util/MetadataUtil.cc b/services/camera/virtualcamera/util/MetadataUtil.cc
index 31a8776..4889830 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.cc
+++ b/services/camera/virtualcamera/util/MetadataUtil.cc
@@ -961,6 +961,20 @@
   return static_cast<int32_t>(entry.data.i32[0]);
 }
 
+std::optional<int32_t> getSensorOrientation(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata) {
+  auto metadata =
+      reinterpret_cast<const camera_metadata_t*>(cameraMetadata.metadata.data());
+
+  camera_metadata_ro_entry_t entry;
+  if (find_camera_metadata_ro_entry(metadata, ANDROID_SENSOR_ORIENTATION,
+                                    &entry) != OK) {
+    return std::nullopt;
+  }
+
+  return static_cast<int32_t>(entry.data.i32[0]);
+}
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/MetadataUtil.h b/services/camera/virtualcamera/util/MetadataUtil.h
index ca6f332..22d3657 100644
--- a/services/camera/virtualcamera/util/MetadataUtil.h
+++ b/services/camera/virtualcamera/util/MetadataUtil.h
@@ -488,6 +488,11 @@
 std::optional<int32_t> getDeviceId(
     const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata);
 
+// Return the value of ANDROID_SENSOR_ORIENTATION or nullopt if the key is not
+// present (which is equivalent to an orientation of 0).
+std::optional<int32_t> getSensorOrientation(
+    const aidl::android::hardware::camera::device::CameraMetadata& cameraMetadata);
+
 }  // namespace virtualcamera
 }  // namespace companion
 }  // namespace android
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.cc b/services/camera/virtualcamera/util/TestPatternHelper.cc
deleted file mode 100644
index 274996a..0000000
--- a/services/camera/virtualcamera/util/TestPatternHelper.cc
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// #define LOG_NDEBUG 0
-
-#define LOG_TAG "TestPatternHelper"
-
-#include "TestPatternHelper.h"
-
-#include <complex>
-#include <cstdint>
-
-#include "log/log.h"
-#include "nativebase/nativebase.h"
-#include "system/graphics.h"
-#include "ui/GraphicBuffer.h"
-#include "utils/Errors.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-namespace {
-
-using namespace std::chrono_literals;
-
-static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
-
-uint8_t julia(const std::complex<float> n, const std::complex<float> c) {
-  std::complex<float> z = n;
-  for (int i = 0; i < 64; i++) {
-    z = z * z + c;
-    if (std::abs(z) > 2.0) return i * 4;
-  }
-  return 0xff;
-}
-
-uint8_t pixelToFractal(const int x, const int y, const int width,
-                       const int height, const std::complex<float> c) {
-  std::complex<float> n(float(x) / float(width) - 0.5,
-                        float(y) / float(height) - 0.5);
-  return julia(n * 5.f, c);
-}
-
-void renderTestPatternYcbCr420(const android_ycbcr& ycbr, const int width,
-                               const int height, const int frameNumber) {
-  float time = float(frameNumber) / 120.0f;
-  const std::complex<float> c(std::sin(time), std::cos(time));
-
-  uint8_t* y = reinterpret_cast<uint8_t*>(ycbr.y);
-  uint8_t* cb = reinterpret_cast<uint8_t*>(ycbr.cb);
-  uint8_t* cr = reinterpret_cast<uint8_t*>(ycbr.cr);
-
-  for (int row = 0; row < height; row++) {
-    for (int col = 0; col < width; col++) {
-      y[row * ycbr.ystride + col] =
-          pixelToFractal(col, row, width, height, c * 0.78f);
-    }
-  }
-
-  int cWidth = width / 2;
-  int cHeight = height / 2;
-  for (int row = 0; row < cHeight; row++) {
-    for (int col = 0; col < cWidth; col++) {
-      cb[row * ycbr.cstride + col * ycbr.chroma_step] =
-          static_cast<uint8_t>((float(col) / float(cWidth)) * 255.f);
-      cr[row * ycbr.cstride + col * ycbr.chroma_step] =
-          static_cast<uint8_t>((float(row) / float(cHeight)) * 255.f);
-    }
-  }
-}
-
-}  // namespace
-
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber) {
-  if (surface == nullptr) {
-    ALOGE("%s: null surface, skipping render", __func__);
-    return;
-  }
-
-  ANativeWindowBuffer* buffer;
-  int fenceFd;
-  int ret = ANativeWindow_dequeueBuffer(surface.get(), &buffer, &fenceFd);
-  if (ret != NO_ERROR) {
-    ALOGE(
-        "%s: Error while deuqueing buffer from surface, "
-        "ANativeWindow_dequeueBuffer returned %d",
-        __func__, ret);
-    return;
-  }
-
-  if (buffer == nullptr) {
-    ALOGE("%s: ANativeWindowBuffer is null after dequeing", __func__);
-    return;
-  }
-
-  sp<Fence> fence = sp<Fence>::make(fenceFd);
-  if (fence->isValid()) {
-    ret = fence->wait(kAcquireFenceTimeout.count());
-    if (ret != NO_ERROR) {
-      ALOGE("%s: Timeout while waiting for the fence to clear", __func__);
-      ANativeWindow_queueBuffer(surface.get(), buffer, fence->dup());
-      return;
-    }
-  }
-
-  sp<GraphicBuffer> gBuffer = GraphicBuffer::from(buffer);
-  android_ycbcr ycbr;
-
-  ret = gBuffer->lockAsyncYCbCr(GraphicBuffer::USAGE_SW_WRITE_OFTEN, &ycbr,
-                                fence->dup());
-  if (ret != NO_ERROR) {
-    ALOGE("%s: Failed to lock buffer retrieved from surface, ret %d", __func__,
-          ret);
-    return;
-  }
-
-  renderTestPatternYcbCr420(ycbr, gBuffer->getWidth(), gBuffer->getHeight(),
-                            frameNumber);
-
-  ret = gBuffer->unlock();
-  if (ret != NO_ERROR) {
-    ALOGE("%s: Failed to unlock buffer, ret %d", __func__, ret);
-    return;
-  }
-
-  ret = ANativeWindow_queueBuffer(surface.get(), buffer, /*fenceFd=*/-1);
-  if (ret != NO_ERROR) {
-    ALOGE(
-        "%s: Error while queing buffer to surface, ANativeWindow_queueBuffer "
-        "returned %d",
-        __func__, ret);
-    return;
-  }
-}
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
diff --git a/services/camera/virtualcamera/util/TestPatternHelper.h b/services/camera/virtualcamera/util/TestPatternHelper.h
deleted file mode 100644
index f842b29..0000000
--- a/services/camera/virtualcamera/util/TestPatternHelper.h
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (C) 2023 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
-#define ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
-
-#include "gui/Surface.h"
-
-namespace android {
-namespace companion {
-namespace virtualcamera {
-
-// Helper function for rendering test pattern into Surface.
-void renderTestPatternYCbCr420(sp<Surface> surface, int frameNumber);
-
-}  // namespace virtualcamera
-}  // namespace companion
-}  // namespace android
-
-#endif  // ANDROID_COMPANION_VIRTUALCAMERA_TESTPATTERNHELPER_H
diff --git a/services/camera/virtualcamera/util/Util.cc b/services/camera/virtualcamera/util/Util.cc
index 0c607d7..26015d1 100644
--- a/services/camera/virtualcamera/util/Util.cc
+++ b/services/camera/virtualcamera/util/Util.cc
@@ -23,6 +23,7 @@
 #include <cstdint>
 #include <memory>
 
+#include "EglUtil.h"
 #include "android/hardware_buffer.h"
 #include "jpeglib.h"
 #include "ui/GraphicBuffer.h"
@@ -35,11 +36,6 @@
 using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::hardware::common::NativeHandle;
 
-// Lower bound for maximal supported texture size is at least 2048x2048
-// but on most platforms will be more.
-// TODO(b/301023410) - Query actual max texture size.
-constexpr int kMaxTextureSize = 2048;
-constexpr int kLibJpegDctSize = DCTSIZE;
 constexpr int kMaxFpsUpperLimit = 60;
 
 constexpr std::array<Format, 2> kSupportedFormats{Format::YUV_420_888,
@@ -69,7 +65,6 @@
   if (gBuffer == nullptr) {
     return;
   }
-  gBuffer->unlock();
   status_t status = gBuffer->unlock();
   if (status != NO_ERROR) {
     ALOGE("Failed to unlock graphic buffer: %s", statusToString(status).c_str());
@@ -94,13 +89,16 @@
     return;
   }
 
-  const int32_t rawFence = fence != nullptr ? fence->get() : -1;
+  const int32_t rawFence = fence != nullptr ? dup(fence->get()) : -1;
   mLockStatus = static_cast<status_t>(AHardwareBuffer_lockPlanes(
       hwBuffer.get(), usageFlags, rawFence, nullptr, &mPlanes));
   if (mLockStatus != OK) {
     ALOGE("%s: Failed to lock graphic buffer: %s", __func__,
           statusToString(mLockStatus).c_str());
   }
+  if (rawFence >= 0) {
+    close(rawFence);
+  }
 }
 
 PlanesLockGuard::~PlanesLockGuard() {
@@ -141,8 +139,9 @@
     return false;
   }
 
-  if (width <= 0 || height <= 0 || width > kMaxTextureSize ||
-      height > kMaxTextureSize) {
+  int maxTextureSize = getMaximumTextureSize();
+  if (width <= 0 || height <= 0 || width > maxTextureSize ||
+      height > maxTextureSize) {
     return false;
   }
 
diff --git a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
index 41efce0..92f0745 100644
--- a/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
+++ b/services/mediacodec/seccomp_policy/mediaswcodec-arm.policy
@@ -85,5 +85,6 @@
 getegid32: 1
 getgroups32: 1
 sysinfo: 1
+setsockopt: 1
 
 @include /apex/com.android.media.swcodec/etc/seccomp_policy/code_coverage.arm.policy
diff --git a/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp b/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
index d329e54..2b31de7 100644
--- a/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
+++ b/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
@@ -22,6 +22,7 @@
 using ::android::MediaExtractorService;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    signal(SIGPIPE, SIG_IGN);
     auto service = sp<MediaExtractorService>::make();
     fuzzService(service, FuzzedDataProvider(data, size));
     return 0;
diff --git a/services/medialog/Android.bp b/services/medialog/Android.bp
index fdb56e5..7a4c3ad 100644
--- a/services/medialog/Android.bp
+++ b/services/medialog/Android.bp
@@ -29,6 +29,8 @@
         "packagemanager_aidl-cpp",
     ],
 
+    export_include_dirs: ["."],
+
     cflags: [
         "-Wall",
         "-Werror",
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index bd4ac38..c01d46e 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -555,22 +555,24 @@
 }
 
 std::pair<std::string, int32_t> AudioAnalytics::dump(
-        int32_t lines, int64_t sinceNs, const char *prefix) const
+        bool details, int32_t lines, int64_t sinceNs, const char *prefix) const
 {
     std::stringstream ss;
     int32_t ll = lines;
 
     if (ll > 0) {
-        auto [s, l] = mAnalyticsState->dump(ll, sinceNs, prefix);
+        auto [s, l] = mAnalyticsState->dump(details, ll, sinceNs, prefix);
         ss << s;
         ll -= l;
     }
-    if (ll > 0) {
+
+    // Use details to decide whether to dump the prior audioserver state.
+    if (details && ll > 0) {
         ss << "Prior audioserver state:\n";
         --ll;
     }
-    if (ll > 0) {
-        auto [s, l] = mPreviousAnalyticsState->dump(ll, sinceNs, prefix);
+    if (details && ll > 0) {
+        auto [s, l] = mPreviousAnalyticsState->dump(details, ll, sinceNs, prefix);
         ss << s;
         ll -= l;
     }
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 7dc445b..201d740 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -93,18 +93,27 @@
 /* static */
 bool AudioPowerUsage::deviceFromString(const std::string& device_string, int32_t& device) {
     static std::map<std::string, int32_t> deviceTable = {
-        { "AUDIO_DEVICE_OUT_EARPIECE",             OUTPUT_EARPIECE },
-        { "AUDIO_DEVICE_OUT_SPEAKER_SAFE",         OUTPUT_SPEAKER_SAFE },
-        { "AUDIO_DEVICE_OUT_SPEAKER",              OUTPUT_SPEAKER },
-        { "AUDIO_DEVICE_OUT_WIRED_HEADSET",        OUTPUT_WIRED_HEADSET },
-        { "AUDIO_DEVICE_OUT_WIRED_HEADPHONE",      OUTPUT_WIRED_HEADSET },
-        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO",        OUTPUT_BLUETOOTH_SCO },
-        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP",       OUTPUT_BLUETOOTH_A2DP },
-        { "AUDIO_DEVICE_OUT_USB_HEADSET",          OUTPUT_USB_HEADSET },
-        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET", OUTPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_OUT_EARPIECE",                  OUTPUT_EARPIECE },
+        { "AUDIO_DEVICE_OUT_SPEAKER_SAFE",              OUTPUT_SPEAKER_SAFE },
+        { "AUDIO_DEVICE_OUT_SPEAKER",                   OUTPUT_SPEAKER },
+        { "AUDIO_DEVICE_OUT_WIRED_HEADSET",             OUTPUT_WIRED_HEADSET },
+        { "AUDIO_DEVICE_OUT_WIRED_HEADPHONE",           OUTPUT_WIRED_HEADSET },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO",             OUTPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET",     OUTPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP",            OUTPUT_BLUETOOTH_A2DP },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES", OUTPUT_BLUETOOTH_A2DP },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER",    OUTPUT_BLUETOOTH_A2DP },
+        { "AUDIO_DEVICE_OUT_BLE_HEADSET",               OUTPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_OUT_BLE_SPEAKER",               OUTPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_OUT_BLE_BROADCAST",             OUTPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_OUT_USB_HEADSET",               OUTPUT_USB_HEADSET },
+        { "AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET",         OUTPUT_DOCK },
+        { "AUDIO_DEVICE_OUT_HDMI",                      OUTPUT_HDMI },
 
         { "AUDIO_DEVICE_IN_BUILTIN_MIC",           INPUT_BUILTIN_MIC },
         { "AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET", INPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_IN_BLUETOOTH_BLE",         INPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_IN_BLE_HEADSET",           INPUT_BLUETOOTH_BLE },
         { "AUDIO_DEVICE_IN_WIRED_HEADSET",         INPUT_WIRED_HEADSET_MIC },
         { "AUDIO_DEVICE_IN_USB_DEVICE",            INPUT_USB_HEADSET_MIC },
         { "AUDIO_DEVICE_IN_BACK_MIC",              INPUT_BUILTIN_BACK_MIC },
diff --git a/services/mediametrics/MediaMetricsService.cpp b/services/mediametrics/MediaMetricsService.cpp
index f81db53..2ec4ac8 100644
--- a/services/mediametrics/MediaMetricsService.cpp
+++ b/services/mediametrics/MediaMetricsService.cpp
@@ -49,12 +49,9 @@
 // (0 for either of these disables that threshold)
 //
 static constexpr nsecs_t kMaxRecordAgeNs = 28 * 3600 * NANOS_PER_SECOND;
-// 2019/6: average daily per device is currently 375-ish;
-// setting this to 2000 is large enough to catch most devices
-// we'll lose some data on very very media-active devices, but only for
-// the gms collection; statsd will have already covered those for us.
-// This also retains enough information to help with bugreports
-static constexpr size_t kMaxRecords = 2000;
+
+// Maximum number of records to keep in the queue; these are dumped for bugreports.
+static constexpr size_t kMaxRecords = 2500;
 
 // max we expire in a single call, to constrain how long we hold the
 // mutex, which also constrains how long a client might wait.
@@ -92,16 +89,12 @@
 /* static */
 std::pair<std::string, int64_t>
 MediaMetricsService::getSanitizedPackageNameAndVersionCode(uid_t uid) {
-    // Meyer's singleton, initialized on first access.
-    // mUidInfo is locked internally.
-    static mediautils::UidInfo uidInfo;
-
-    // get info.
-    mediautils::UidInfo::Info info = uidInfo.getInfo(uid);
-    if (useUidForPackage(info.package, info.installer)) {
+    const std::shared_ptr<const mediautils::UidInfo::Info> info =
+            mediautils::UidInfo::getInfo(uid);
+    if (useUidForPackage(info->package, info->installer)) {
         return { std::to_string(uid), /* versionCode */ 0 };
     } else {
-        return { info.package, info.versionCode };
+        return { info->package, info->versionCode };
     }
 }
 
@@ -315,7 +308,8 @@
 
             // TODO: maybe consider a better way of dumping audio analytics info.
             const int32_t linesToDump = all ? INT32_MAX : 1000;
-            auto [ dumpString, lines ] = mAudioAnalytics.dump(linesToDump, sinceNs, prefixptr);
+            auto [ dumpString, lines ] = mAudioAnalytics.dump(
+                    all, linesToDump, sinceNs, prefixptr);
             result << dumpString;
             if (lines == linesToDump) {
                 result << "-- some lines may be truncated --\n";
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0
new file mode 100644
index 0000000..802c2b5
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-0
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1
new file mode 100644
index 0000000..9ee6a15
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-1
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10
new file mode 100644
index 0000000..95006c8
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-10
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11
new file mode 100644
index 0000000..853be96
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-11
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12
new file mode 100644
index 0000000..c3e9848
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-12
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13
new file mode 100644
index 0000000..08b7f0d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-13
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14
new file mode 100644
index 0000000..20e5e80
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-14
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15
new file mode 100644
index 0000000..4e54f0b
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-15
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2
new file mode 100644
index 0000000..2b2495d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-2
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3
new file mode 100644
index 0000000..753594d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-3
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4
new file mode 100644
index 0000000..0ed2010
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-4
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5
new file mode 100644
index 0000000..f6141d1
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-5
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6
new file mode 100644
index 0000000..b93f618
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-6
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7
new file mode 100644
index 0000000..f8f296d
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-7
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8
new file mode 100644
index 0000000..29bdbc1
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-8
Binary files differ
diff --git a/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9 b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9
new file mode 100644
index 0000000..315f25e
--- /dev/null
+++ b/services/mediametrics/fuzzer/corpus/seed-2024-08-29-9
Binary files differ
diff --git a/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp
index c7468c7..572e969 100644
--- a/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_aidl_fuzzer.cpp
@@ -22,6 +22,7 @@
 using ::android::MediaMetricsService;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    signal(SIGPIPE, SIG_IGN);
     auto service = sp<MediaMetricsService>::make();
     fuzzService(service, FuzzedDataProvider(data, size));
     return 0;
diff --git a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
index c6793a9..c7b4297 100644
--- a/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
+++ b/services/mediametrics/fuzzer/mediametrics_service_fuzzer.cpp
@@ -17,6 +17,7 @@
  *****************************************************************************
  * Originally developed and contributed by Ittiam Systems Pvt. Ltd, Bangalore
  */
+#include <binder/IPCThreadState.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <media/MediaMetricsItem.h>
 #include <mediametricsservice/AudioTypes.h>
@@ -26,210 +27,158 @@
 #include <string.h>
 #include <utils/Log.h>
 #include <algorithm>
+#include <set>
 
 using namespace android;
+static constexpr size_t STATSD_LOG_LINES_MAX = 48;
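+// Packed calling-identity token with AID_SYSTEM in the upper 32 bits
+// (IPCThreadState packs uid << 32 | pid).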
+static unsigned long long kPackedCallingUid = (unsigned long long)AID_SYSTEM << 32;
+constexpr int8_t kMaxBytes = 100;
+constexpr int8_t kMinBytes = 0;
+constexpr size_t kMaxItemLength = 16;
 
 // low water mark
 constexpr size_t kLogItemsLowWater = 1;
 // high water mark
 constexpr size_t kLogItemsHighWater = 2;
-constexpr size_t kMaxItemLength = 16;
-constexpr size_t kMaxApis = 64;
+
+/*
+ * Keys are generated by concatenating strings so that the lambda functions
+ * registered in the 'mAction' object inside AudioAnalytics() are covered.
+ */
+
+std::string keyMediaValues[] = {
+        "metrics.manager",
+        "mediadrm",
+        "audio.device.a2dp",
+        AMEDIAMETRICS_KEY_AUDIO_MIDI,
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_SPATIALIZER "*",
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD "*",
+        AMEDIAMETRICS_KEY_AUDIO_FLINGER,
+        AMEDIAMETRICS_KEY_AUDIO_POLICY,
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "*",
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD "*",
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_STREAM "*",
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_DEVICE
+        "postBluetoothA2dpDeviceConnectionStateSuppressNoisyIntent",
+};
+
+std::string keyMediaAction[] = {
+        "createAudioPatch",
+        "connected",
+        AMEDIAMETRICS_PROP_EVENT_VALUE_CREATE,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_TIMEOUT,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAAUDIOSTREAM,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_DEVICECLOSED,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_SETMODE,
+        AMEDIAMETRICS_PROP_EVENT_VALUE_ENDAUDIOINTERVALGROUP,
+};
 
 class MediaMetricsServiceFuzzer {
-   public:
-    void invokeStartsWith(const uint8_t *data, size_t size);
-    void invokeInstantiate(const uint8_t *data, size_t size);
-    void invokePackageInstallerCheck(const uint8_t *data, size_t size);
-    void invokeItemManipulation(const uint8_t *data, size_t size);
-    void invokeItemExpansion(const uint8_t *data, size_t size);
-    void invokeTimeMachineStorage(const uint8_t *data, size_t size);
-    void invokeTransactionLog(const uint8_t *data, size_t size);
-    void invokeAnalyticsAction(const uint8_t *data, size_t size);
-    void invokeAudioAnalytics(const uint8_t *data, size_t size);
-    void invokeTimedAction(const uint8_t *data, size_t size);
-    void process(const uint8_t *data, size_t size);
+  public:
+    MediaMetricsServiceFuzzer(const uint8_t* data, size_t size) : mFdp(data, size){};
+    void process();
+    void invokeStartsWith();
+    void invokeInstantiate();
+    void invokePackageInstallerCheck();
+    void invokeTimeMachineStorage();
+    void invokeTransactionLog();
+    void invokeAnalyticsAction();
+    void invokeAudioAnalytics();
+    void invokeTimedAction();
+    void setKeyValues(std::shared_ptr<mediametrics::Item>& item, std::string keyValue);
+    std::shared_ptr<mediametrics::Item> CreateItem();
+    sp<MediaMetricsService> mMediaMetricsService;
+    FuzzedDataProvider mFdp;
     std::atomic_int mValue = 0;
 };
 
-void MediaMetricsServiceFuzzer::invokeStartsWith(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-    while (fdp.remaining_bytes()) {
-        android::mediametrics::startsWith(fdp.ConsumeRandomLengthString(),
-                                          fdp.ConsumeRandomLengthString());
-    }
+void MediaMetricsServiceFuzzer::setKeyValues(std::shared_ptr<mediametrics::Item>& item,
+                                             std::string keyValue) {
+    auto invokeActionAPIs = mFdp.PickValueInArray<const std::function<void()>>({
+            [&]() { item->setInt32(keyValue.c_str(), mFdp.ConsumeIntegral<int32_t>()); },
+            [&]() { item->addInt32(keyValue.c_str(), mFdp.ConsumeIntegral<int32_t>()); },
+            [&]() { item->setInt64(keyValue.c_str(), mFdp.ConsumeIntegral<int64_t>()); },
+            [&]() { item->addInt64(keyValue.c_str(), mFdp.ConsumeIntegral<int64_t>()); },
+            [&]() { item->setDouble(keyValue.c_str(), mFdp.ConsumeFloatingPoint<double>()); },
+            [&]() { item->addDouble(keyValue.c_str(), mFdp.ConsumeFloatingPoint<double>()); },
+            [&]() { item->setTimestamp(mFdp.ConsumeIntegral<int64_t>()); },
+            [&]() {
+                std::string value = mFdp.ConsumeBool()
+                                            ? mFdp.ConsumeRandomLengthString(kMaxBytes)
+                                            : mFdp.PickValueInArray<std::string>(keyMediaAction);
+                item->setCString(keyValue.c_str(), value.c_str());
+            },
+            [&]() {
+                item->setRate(keyValue.c_str(), mFdp.ConsumeIntegral<int64_t>(),
+                              mFdp.ConsumeIntegral<int64_t>());
+            },
+            [&]() {
+                mediametrics::LogItem<1> itemTemp(mFdp.ConsumeRandomLengthString(kMaxBytes));
+                itemTemp.setPid(mFdp.ConsumeIntegral<int16_t>())
+                        .setUid(mFdp.ConsumeIntegral<int16_t>());
+
+                int32_t i = mFdp.ConsumeIntegral<int32_t>();
+                itemTemp.set(std::to_string(i).c_str(), (int32_t)i);
+                itemTemp.updateHeader();
+                (void)item->readFromByteString(itemTemp.getBuffer(), itemTemp.getLength());
+            },
+
+    });
+    invokeActionAPIs();
 }
 
-void MediaMetricsServiceFuzzer::invokeInstantiate(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-    sp mediaMetricsService = new MediaMetricsService();
-
-    while (fdp.remaining_bytes()) {
-        std::unique_ptr<mediametrics::Item> random_key(
-            mediametrics::Item::create(fdp.ConsumeRandomLengthString()));
-        mediaMetricsService->submit(random_key.get());
-        random_key->setInt32(fdp.ConsumeRandomLengthString().c_str(),
-                             fdp.ConsumeIntegral<int32_t>());
-        mediaMetricsService->submit(random_key.get());
-
-        std::unique_ptr<mediametrics::Item> audiotrack_key(
-            mediametrics::Item::create("audiotrack"));
-        mediaMetricsService->submit(audiotrack_key.get());
-        audiotrack_key->addInt32(fdp.ConsumeRandomLengthString().c_str(),
-                                 fdp.ConsumeIntegral<int32_t>());
-        mediaMetricsService->submit(audiotrack_key.get());
+std::shared_ptr<mediametrics::Item> MediaMetricsServiceFuzzer::CreateItem() {
+    std::string key;
+    if (mFdp.ConsumeBool()) {
+        key = mFdp.ConsumeRandomLengthString(kMaxItemLength);
+    } else {
+        key = mFdp.PickValueInArray<std::string>(keyMediaValues);
     }
-}
 
-void MediaMetricsServiceFuzzer::invokePackageInstallerCheck(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-    while (fdp.remaining_bytes()) {
-        MediaMetricsService::useUidForPackage(fdp.ConsumeRandomLengthString().c_str(),
-                                              fdp.ConsumeRandomLengthString().c_str());
-    }
-}
-
-void MediaMetricsServiceFuzzer::invokeItemManipulation(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
-    mediametrics::Item item(fdp.ConsumeRandomLengthString().c_str());
-    while (fdp.remaining_bytes()) {
-        const uint8_t action = fdp.ConsumeIntegralInRange<uint8_t>(0, 16);
-        const std::string key = fdp.ConsumeRandomLengthString();
-        if (fdp.remaining_bytes() < 1 || key.length() < 1) {
-            break;
+    std::shared_ptr<mediametrics::Item> item = std::make_shared<mediametrics::Item>(key.c_str());
+    size_t numKeys = mFdp.ConsumeIntegralInRange<size_t>(kMinBytes, kMaxBytes);
+    std::set<std::string> keySet;
+    for (size_t i = 0; i < numKeys; ++i) {
+        std::string keyValue;
+        if (mFdp.ConsumeBool()) {
+            keyValue = mFdp.ConsumeRandomLengthString(kMaxBytes);
+        } else {
+            keyValue = mFdp.PickValueInArray<std::string>(
+                    {AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_STATE, "logSessionIkeyd"});
         }
-        switch (action) {
-            case 0: {
-                item.setInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
-                break;
-            }
-            case 1: {
-                item.addInt32(key.c_str(), fdp.ConsumeIntegral<int32_t>());
-                break;
-            }
-            case 2: {
-                int32_t i32 = 0;
-                item.getInt32(key.c_str(), &i32);
-                break;
-            }
-            case 3: {
-                item.setInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
-                break;
-            }
-            case 4: {
-                item.addInt64(key.c_str(), fdp.ConsumeIntegral<int64_t>());
-                break;
-            }
-            case 5: {
-                int64_t i64 = 0;
-                item.getInt64(key.c_str(), &i64);
-                break;
-            }
-            case 6: {
-                item.setDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
-                break;
-            }
-            case 7: {
-                item.addDouble(key.c_str(), fdp.ConsumeFloatingPoint<double>());
-                break;
-            }
-            case 8: {
-                double d = 0;
-                item.getDouble(key.c_str(), &d);
-                break;
-            }
-            case 9: {
-                item.setCString(key.c_str(), fdp.ConsumeRandomLengthString().c_str());
-                break;
-            }
-            case 10: {
-                char *s = nullptr;
-                item.getCString(key.c_str(), &s);
-                if (s) free(s);
-                break;
-            }
-            case 11: {
-                std::string s;
-                item.getString(key.c_str(), &s);
-                break;
-            }
-            case 12: {
-                item.setRate(key.c_str(), fdp.ConsumeIntegral<int64_t>(),
-                             fdp.ConsumeIntegral<int64_t>());
-                break;
-            }
-            case 13: {
-                int64_t b = 0, h = 0;
-                double d = 0;
-                item.getRate(key.c_str(), &b, &h, &d);
-                break;
-            }
-            case 14: {
-                (void)item.filter(key.c_str());
-                break;
-            }
-            case 15: {
-                const char *arr[1] = {""};
-                arr[0] = const_cast<char *>(key.c_str());
-                (void)item.filterNot(1, arr);
-                break;
-            }
-            case 16: {
-                (void)item.toString().c_str();
-                break;
-            }
+        if (keySet.find(keyValue) == keySet.end()) {
+            setKeyValues(item, keyValue);
+            keySet.insert(keyValue);
         }
     }
-
-    Parcel p;
-    mediametrics::Item item2;
-
-    (void)item.writeToParcel(&p);
-    p.setDataPosition(0);  // rewind for reading
-    (void)item2.readFromParcel(p);
-
-    char *byteData = nullptr;
-    size_t length = 0;
-    (void)item.writeToByteString(&byteData, &length);
-    (void)item2.readFromByteString(byteData, length);
-    if (byteData) {
-        free(byteData);
-    }
-
-    sp mediaMetricsService = new MediaMetricsService();
-    mediaMetricsService->submit(&item2);
+    return item;
 }
 
-void MediaMetricsServiceFuzzer::invokeItemExpansion(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
-    mediametrics::LogItem<1> item("FuzzItem");
-    item.setPid(fdp.ConsumeIntegral<int16_t>()).setUid(fdp.ConsumeIntegral<int16_t>());
-
-    while (fdp.remaining_bytes()) {
-        int32_t i = fdp.ConsumeIntegral<int32_t>();
-        item.set(std::to_string(i).c_str(), (int32_t)i);
-    }
-    item.updateHeader();
-
-    mediametrics::Item item2;
-    (void)item2.readFromByteString(item.getBuffer(), item.getLength());
-
-    sp mediaMetricsService = new MediaMetricsService();
-    mediaMetricsService->submit(&item2);
+void MediaMetricsServiceFuzzer::invokeStartsWith() {
+    android::mediametrics::startsWith(mFdp.ConsumeRandomLengthString(kMaxBytes),
+                                      mFdp.ConsumeRandomLengthString(kMaxBytes));
 }
 
-void MediaMetricsServiceFuzzer::invokeTimeMachineStorage(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+void MediaMetricsServiceFuzzer::invokeInstantiate() {
+    auto item = CreateItem();
+    mMediaMetricsService->submit(item.get());
+}
 
-    auto item = std::make_shared<mediametrics::Item>("FuzzKey");
-    int32_t i32 = fdp.ConsumeIntegral<int32_t>();
-    int64_t i64 = fdp.ConsumeIntegral<int64_t>();
-    double d = fdp.ConsumeFloatingPoint<double>();
-    std::string str = fdp.ConsumeRandomLengthString();
-    std::pair<int64_t, int64_t> pair(fdp.ConsumeIntegral<int64_t>(),
-                                     fdp.ConsumeIntegral<int64_t>());
+void MediaMetricsServiceFuzzer::invokePackageInstallerCheck() {
+    MediaMetricsService::useUidForPackage(mFdp.ConsumeRandomLengthString(kMaxBytes).c_str(),
+                                          mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
+}
+
+void MediaMetricsServiceFuzzer::invokeTimeMachineStorage() {
+    auto item = CreateItem();
+    int32_t i32 = mFdp.ConsumeIntegral<int32_t>();
+    int64_t i64 = mFdp.ConsumeIntegral<int64_t>();
+    double d = mFdp.ConsumeFloatingPoint<double>();
+    std::string str = mFdp.ConsumeRandomLengthString(kMaxBytes);
+    std::pair<int64_t, int64_t> pair(mFdp.ConsumeIntegral<int64_t>(),
+                                     mFdp.ConsumeIntegral<int64_t>());
     (*item).set("i32", i32).set("i64", i64).set("double", d).set("string", str).set("rate", pair);
 
     android::mediametrics::TimeMachine timeMachine;
@@ -253,124 +202,89 @@
     timeMachine.get("Key.string", &str, -1);
 }
 
-void MediaMetricsServiceFuzzer::invokeTransactionLog(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
-    auto item = std::make_shared<mediametrics::Item>("Key1");
-    (*item)
-        .set("one", fdp.ConsumeIntegral<int32_t>())
-        .set("two", fdp.ConsumeIntegral<int32_t>())
-        .setTimestamp(fdp.ConsumeIntegral<int32_t>());
+void MediaMetricsServiceFuzzer::invokeTransactionLog() {
+    auto item = CreateItem();
 
     android::mediametrics::TransactionLog transactionLog(
         kLogItemsLowWater, kLogItemsHighWater);  // keep at most 2 items
     transactionLog.size();
 
     transactionLog.put(item);
-    transactionLog.size();
-
-    auto item2 = std::make_shared<mediametrics::Item>("Key2");
-    (*item2)
-        .set("three", fdp.ConsumeIntegral<int32_t>())
-        .set("[Key1]three", fdp.ConsumeIntegral<int32_t>())
-        .setTimestamp(fdp.ConsumeIntegral<int32_t>());
-
-    transactionLog.put(item2);
-    transactionLog.size();
-
-    auto item3 = std::make_shared<mediametrics::Item>("Key3");
-    (*item3)
-        .set("six", fdp.ConsumeIntegral<int32_t>())
-        .set("[Key1]four", fdp.ConsumeIntegral<int32_t>())  // affects Key1
-        .set("[Key1]five", fdp.ConsumeIntegral<int32_t>())  // affects key1
-        .setTimestamp(fdp.ConsumeIntegral<int32_t>());
-
-    transactionLog.put(item3);
-    transactionLog.size();
 }
 
-void MediaMetricsServiceFuzzer::invokeAnalyticsAction(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
-
+void MediaMetricsServiceFuzzer::invokeAnalyticsAction() {
     mediametrics::AnalyticsActions analyticsActions;
     bool action = false;
 
-    while (fdp.remaining_bytes()) {
-        analyticsActions.addAction(
-            (fdp.ConsumeRandomLengthString() + std::string(".event")).c_str(),
-            fdp.ConsumeRandomLengthString(),
+    analyticsActions.addAction(
+            (mFdp.ConsumeRandomLengthString(kMaxBytes) + std::string(".event")).c_str(),
+            mFdp.ConsumeRandomLengthString(kMaxBytes),
             std::make_shared<mediametrics::AnalyticsActions::Function>(
-                [&](const std::shared_ptr<const android::mediametrics::Item> &) {
-                    action = true;
-                }));
-    }
+                    [&](const std::shared_ptr<const android::mediametrics::Item>&) {
+                        action = true;
+                    }));
 
-    FuzzedDataProvider fdp2 = FuzzedDataProvider(data, size);
-    size_t apiCount = 0;
-    while (fdp2.remaining_bytes() && ++apiCount <= kMaxApis) {
-        // make a test item
-        auto item = std::make_shared<mediametrics::Item>(
-                fdp2.ConsumeRandomLengthString(kMaxItemLength).c_str());
-        (*item).set("event", fdp2.ConsumeRandomLengthString().c_str());
+    // make a test item
+    auto item = CreateItem();
+    (*item).set("event", mFdp.ConsumeRandomLengthString(kMaxBytes).c_str());
 
-        // get the actions and execute them
-        auto actions = analyticsActions.getActionsForItem(item);
-        for (const auto &action : actions) {
-            action->operator()(item);
+    // get the actions and execute them
+    auto actions = analyticsActions.getActionsForItem(item);
+    for (const auto& action : actions) {
+        action->operator()(item);
         }
-    }
 }
 
-void MediaMetricsServiceFuzzer::invokeAudioAnalytics(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+void MediaMetricsServiceFuzzer::invokeAudioAnalytics() {
+    int32_t maxLogLine = mFdp.ConsumeIntegralInRange<int32_t>(0, STATSD_LOG_LINES_MAX);
     std::shared_ptr<android::mediametrics::StatsdLog> statsdLog =
-            std::make_shared<android::mediametrics::StatsdLog>(10);
+            std::make_shared<android::mediametrics::StatsdLog>(maxLogLine);
     android::mediametrics::AudioAnalytics audioAnalytics{statsdLog};
 
-    while (fdp.remaining_bytes()) {
-        auto item = std::make_shared<mediametrics::Item>(fdp.ConsumeRandomLengthString().c_str());
-        int32_t transactionUid = fdp.ConsumeIntegral<int32_t>();  // arbitrary
-        (*item)
-            .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
-            .set(fdp.ConsumeRandomLengthString().c_str(), fdp.ConsumeIntegral<int32_t>())
-            .set(AMEDIAMETRICS_PROP_ALLOWUID, transactionUid)
-            .setUid(transactionUid)
-            .setTimestamp(fdp.ConsumeIntegral<int32_t>());
-        audioAnalytics.submit(item, fdp.ConsumeBool());
+    auto item = CreateItem();
+    Parcel parcel;
+    item->writeToParcel(&parcel);
+    parcel.setDataPosition(0);
+    if (mFdp.ConsumeBool()) {
+        item->readFromParcel(parcel);
     }
-
-    audioAnalytics.dump(1000);
+    audioAnalytics.submit(item, mFdp.ConsumeBool());
 }
 
-void MediaMetricsServiceFuzzer::invokeTimedAction(const uint8_t *data, size_t size) {
-    FuzzedDataProvider fdp = FuzzedDataProvider(data, size);
+void MediaMetricsServiceFuzzer::invokeTimedAction() {
     android::mediametrics::TimedAction timedAction;
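+    // Post a single delayed action (it increments mValue when run) and query the queue size.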
+    timedAction.postIn(std::chrono::seconds(mFdp.ConsumeIntegral<uint32_t>()),
+                       [this] { ++mValue; });
+    timedAction.size();
+}
 
-    while (fdp.remaining_bytes()) {
-        timedAction.postIn(std::chrono::seconds(fdp.ConsumeIntegral<int32_t>()),
-                           [this] { ++mValue; });
-        timedAction.size();
+void MediaMetricsServiceFuzzer::process() {
+    mMediaMetricsService = sp<MediaMetricsService>::make();
+
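+    // Restore either the predefined packed calling identity or an arbitrary fuzzed one.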
+    if (mFdp.ConsumeBool()) {
+        IPCThreadState::self()->restoreCallingIdentity(kPackedCallingUid);
+    } else {
+        IPCThreadState::self()->restoreCallingIdentity(mFdp.ConsumeIntegral<size_t>());
+    }
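+    // Keep invoking randomly chosen fuzzer APIs until the fuzzed input is exhausted.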
+    while (mFdp.remaining_bytes()) {
+        auto invokeAPIs = mFdp.PickValueInArray<const std::function<void()>>({
+                [&]() { invokeStartsWith(); },
+                [&]() { invokeInstantiate(); },
+                [&]() { invokePackageInstallerCheck(); },
+                [&]() { invokeTimeMachineStorage(); },
+                [&]() { invokeTransactionLog(); },
+                [&]() { invokeAudioAnalytics(); },
+                [&]() { invokeTimedAction(); },
+        });
+        invokeAPIs();
     }
 }
 
-void MediaMetricsServiceFuzzer::process(const uint8_t *data, size_t size) {
-    invokeStartsWith(data, size);
-    invokeInstantiate(data, size);
-    invokePackageInstallerCheck(data, size);
-    invokeItemManipulation(data, size);
-    invokeItemExpansion(data, size);
-    invokeTimeMachineStorage(data, size);
-    invokeTransactionLog(data, size);
-    invokeAnalyticsAction(data, size);
-    invokeAudioAnalytics(data, size);
-    invokeTimedAction(data, size);
-}
-
-extern "C" int LLVMFuzzerTestOneInput(const uint8_t *data, size_t size) {
+extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
     if (size < 1) {
         return 0;
     }
-    MediaMetricsServiceFuzzer mediaMetricsServiceFuzzer;
-    mediaMetricsServiceFuzzer.process(data, size);
+    MediaMetricsServiceFuzzer mediaMetricsServiceFuzzer(data, size);
+    mediaMetricsServiceFuzzer.process();
     return 0;
 }
diff --git a/services/mediametrics/include/mediametricsservice/AnalyticsState.h b/services/mediametrics/include/mediametricsservice/AnalyticsState.h
index 09c0b4c..1dabe5d 100644
--- a/services/mediametrics/include/mediametricsservice/AnalyticsState.h
+++ b/services/mediametrics/include/mediametricsservice/AnalyticsState.h
@@ -83,11 +83,12 @@
      * different locks, so may not be 100% consistent with the last data
      * delivered.
      *
+     * \param details dumps the detailed internal state.
      * \param lines the maximum number of lines in the string returned.
      * \param sinceNs the nanoseconds since Unix epoch to start dump (0 shows all)
      * \param prefix the desired key prefix to match (nullptr shows all)
      */
-    std::pair<std::string, int32_t> dump(
+    std::pair<std::string, int32_t> dump(bool details,
             int32_t lines = INT32_MAX, int64_t sinceNs = 0, const char *prefix = nullptr) const {
         std::stringstream ss;
         int32_t ll = lines;
@@ -96,7 +97,7 @@
             ss << "TransactionLog: gc(" << mTransactionLog.getGarbageCollectionCount() << ")\n";
             --ll;
         }
-        if (ll > 0) {
+        if (details && ll > 0) {
             auto [s, l] = mTransactionLog.dump(ll, sinceNs, prefix);
             ss << s;
             ll -= l;
@@ -105,7 +106,7 @@
             ss << "TimeMachine: gc(" << mTimeMachine.getGarbageCollectionCount() << ")\n";
             --ll;
         }
-        if (ll > 0) {
+        if (details && ll > 0) {
             auto [s, l] = mTimeMachine.dump(ll, sinceNs, prefix);
             ss << s;
             ll -= l;
diff --git a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
index f0a4ac8..57f55c1 100644
--- a/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
+++ b/services/mediametrics/include/mediametricsservice/AudioAnalytics.h
@@ -67,11 +67,12 @@
      * different locks, so may not be 100% consistent with the last data
      * delivered.
      *
+     * \param details dumps the detailed internal state.
      * \param lines the maximum number of lines in the string returned.
      * \param sinceNs the nanoseconds since Unix epoch to start dump (0 shows all)
      * \param prefix the desired key prefix to match (nullptr shows all)
      */
-    std::pair<std::string, int32_t> dump(
+    std::pair<std::string, int32_t> dump(bool details,
             int32_t lines = INT32_MAX, int64_t sinceNs = 0, const char *prefix = nullptr) const;
 
     /**
diff --git a/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
index 6e5a5cf..cf09113 100644
--- a/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
+++ b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
@@ -71,6 +71,9 @@
         OUTPUT_BLUETOOTH_SCO    = 0x10,
         OUTPUT_BLUETOOTH_A2DP   = 0x20,
         OUTPUT_SPEAKER_SAFE     = 0x40,
+        OUTPUT_BLUETOOTH_BLE    = 0x80,
+        OUTPUT_DOCK             = 0x100,
+        OUTPUT_HDMI             = 0x200,
 
         INPUT_DEVICE_BIT        = 0x40000000,
         INPUT_BUILTIN_MIC       = INPUT_DEVICE_BIT | 0x1, // non-negative positive int32.
@@ -78,6 +81,7 @@
         INPUT_WIRED_HEADSET_MIC = INPUT_DEVICE_BIT | 0x4,
         INPUT_USB_HEADSET_MIC   = INPUT_DEVICE_BIT | 0x8,
         INPUT_BLUETOOTH_SCO     = INPUT_DEVICE_BIT | 0x10,
+        INPUT_BLUETOOTH_BLE     = INPUT_DEVICE_BIT | 0x20,
     };
 
     static bool typeFromString(const std::string& type_string, int32_t& type);
diff --git a/services/mediametrics/include/mediametricsservice/AudioTypes.h b/services/mediametrics/include/mediametricsservice/AudioTypes.h
index b5fe28b..59654bf 100644
--- a/services/mediametrics/include/mediametricsservice/AudioTypes.h
+++ b/services/mediametrics/include/mediametricsservice/AudioTypes.h
@@ -18,6 +18,7 @@
 
 #include <string>
 #include <unordered_map>
+#include <vector>
 
 namespace android::mediametrics::types {
 
diff --git a/services/mediametrics/include/mediametricsservice/TimedAction.h b/services/mediametrics/include/mediametricsservice/TimedAction.h
index 8b53ded..8901ced 100644
--- a/services/mediametrics/include/mediametricsservice/TimedAction.h
+++ b/services/mediametrics/include/mediametricsservice/TimedAction.h
@@ -81,9 +81,8 @@
     void threadLoop() NO_THREAD_SAFETY_ANALYSIS { // thread safety doesn't cover unique_lock
         std::unique_lock l(mLock);
         while (!mQuit) {
-            auto sleepUntilTime = std::chrono::time_point<TimerClock>::max();
             if (!mMap.empty()) {
-                sleepUntilTime = mMap.begin()->first;
+                auto sleepUntilTime = mMap.begin()->first;
                 const auto now = TimerClock::now();
                 if (sleepUntilTime <= now) {
                     auto node = mMap.extract(mMap.begin()); // removes from mMap.
@@ -96,8 +95,17 @@
                 // of REALTIME specification, use kWakeupInterval to ensure minimum
                 // granularity if suspended.
                 sleepUntilTime = std::min(sleepUntilTime, now + kWakeupInterval);
+                mCondition.wait_until(l, sleepUntilTime);
+            } else {
+                // As TimerClock is system_clock (which is not monotonic), libcxx's
+                // implementation of condition_variable::wait_until(l, std::chrono::time_point)
+                // recalculates the 'until' time into the wait duration and then goes back to the
+                // absolute timestamp when calling pthread_cond_timedwait(); this back-and-forth
+                // calculation sometimes loses the 'max' value because enough time passes in
+                // between, and instead passes an incorrect timestamp to the syscall, causing a
+                // crash. Mitigate this by explicitly calling the non-timed wait here.
+                mCondition.wait(l);
             }
-            mCondition.wait_until(l, sleepUntilTime);
         }
     }
 
diff --git a/services/mediametrics/tests/mediametrics_tests.cpp b/services/mediametrics/tests/mediametrics_tests.cpp
index 4a6aee4..a7684f4 100644
--- a/services/mediametrics/tests/mediametrics_tests.cpp
+++ b/services/mediametrics/tests/mediametrics_tests.cpp
@@ -850,14 +850,14 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(true /* details */, 1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item2, false /* isTrusted */));
 
   // Check that we have some info in the dump.
-  ASSERT_LT(9, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_LT(9, audioAnalytics.dump(true /* details */, 1000).second /* lines */);
 }
 
 TEST(mediametrics_tests, audio_analytics_permission2) {
@@ -888,14 +888,14 @@
 
   // TODO: Verify contents of AudioAnalytics.
   // Currently there is no getter API in AudioAnalytics besides dump.
-  ASSERT_EQ(10, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_EQ(10, audioAnalytics.dump(true /* details */, 1000).second /* lines */);
 
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item, true /* isTrusted */));
   // untrusted entities can add to an existing key
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item2, false /* isTrusted */));
 
   // Check that we have some info in the dump.
-  ASSERT_LT(9, audioAnalytics.dump(1000).second /* lines */);
+  ASSERT_LT(9, audioAnalytics.dump(true /* details */, 1000).second /* lines */);
 }
 
 TEST(mediametrics_tests, audio_analytics_dump) {
@@ -922,13 +922,13 @@
   ASSERT_EQ(NO_ERROR, audioAnalytics.submit(item3, true /* isTrusted */));
 
   // find out how many lines we have.
-  auto [string, lines] = audioAnalytics.dump(1000);
+  auto [string, lines] = audioAnalytics.dump(true /* details */, 1000);
   ASSERT_EQ(lines, (int32_t) countNewlines(string.c_str()));
 
   printf("AudioAnalytics: %s", string.c_str());
   // ensure that dump operates over those lines.
   for (int32_t ll = 0; ll < lines; ++ll) {
-      auto [s, l] = audioAnalytics.dump(ll);
+      auto [s, l] = audioAnalytics.dump(true /* details */, ll);
       ASSERT_EQ(ll, l);
       ASSERT_EQ(ll, (int32_t) countNewlines(s.c_str()));
   }
diff --git a/services/mediaresourcemanager/ResourceManagerMetrics.cpp b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
index a8a1de1..8b3711c 100644
--- a/services/mediaresourcemanager/ResourceManagerMetrics.cpp
+++ b/services/mediaresourcemanager/ResourceManagerMetrics.cpp
@@ -821,7 +821,7 @@
     metricsLog += getAppsPixelCount(mProcessPixelsMap);
     metricsLog += getAppsCodecUsageMetrics(mProcessConcurrentCodecsMap);
 
-    return std::move(metricsLog);
+    return metricsLog;
 }
 
 } // namespace android
diff --git a/services/mediaresourcemanager/ResourceManagerService.cpp b/services/mediaresourcemanager/ResourceManagerService.cpp
index 9c2fb7c..f12a5d6 100644
--- a/services/mediaresourcemanager/ResourceManagerService.cpp
+++ b/services/mediaresourcemanager/ResourceManagerService.cpp
@@ -310,6 +310,7 @@
     mServiceLog->add(log);
 
     std::scoped_lock lock{mLock};
+    ClientInfoParcel updatedClientInfo = clientInfo;
     if (!mProcessInfo->isPidUidTrusted(pid, uid)) {
         pid_t callingPid = IPCThreadState::self()->getCallingPid();
         uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -317,6 +318,8 @@
                 __FUNCTION__, pid, uid, callingPid, callingUid);
         pid = callingPid;
         uid = callingUid;
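+        // Keep the corrected identity so the death notifier below is created for the real caller.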
+        updatedClientInfo.pid = callingPid;
+        updatedClientInfo.uid = callingUid;
     }
     ResourceInfos& infos = getResourceInfosForEdit(pid, mMap);
     ResourceInfo& info = getResourceInfoForEdit(clientInfo, client, infos);
@@ -342,7 +345,7 @@
     }
     if (info.deathNotifier == nullptr && client != nullptr) {
         info.deathNotifier = DeathNotifier::Create(
-            client, ref<ResourceManagerService>(), clientInfo);
+            client, ref<ResourceManagerService>(), updatedClientInfo);
     }
     if (mObserverService != nullptr && !resourceAdded.empty()) {
         mObserverService->onResourceAdded(uid, pid, resourceAdded);
diff --git a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
index 679ab13..49f68e9 100644
--- a/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
+++ b/services/mediaresourcemanager/ResourceManagerServiceUtils.cpp
@@ -94,7 +94,7 @@
         str.append("\n");
     }
 
-    return std::move(str);
+    return str;
 }
 
 bool ResourceList::operator==(const ResourceList& rhs) const {
diff --git a/services/mediaresourcemanager/fuzzer/Android.bp b/services/mediaresourcemanager/fuzzer/Android.bp
index 5bac062..3f04f69 100644
--- a/services/mediaresourcemanager/fuzzer/Android.bp
+++ b/services/mediaresourcemanager/fuzzer/Android.bp
@@ -47,7 +47,7 @@
     ],
     fuzz_config: {
         cc: [
-            "android-media-fuzzing-reports@google.com",
+            "girishshetty@google.com",
         ],
         componentid: 155276,
         hotlists: [
diff --git a/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
index 6253df7..1cad482 100644
--- a/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
+++ b/services/mediaresourcemanager/fuzzer/resourcemanager_service_fuzzer.cpp
@@ -26,6 +26,7 @@
 using ndk::SharedRefBase;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+   signal(SIGPIPE, SIG_IGN);
    std::shared_ptr<ResourceManagerService> service = ResourceManagerService::Create();
    fuzzService(service->asBinder().get(), FuzzedDataProvider(data, size));
    return 0;
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 6a64823..ac41959 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -29,6 +29,9 @@
         "libactivitymanager_aidl",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
+    ],
     include_dirs: [
         "frameworks/av/include",
         "frameworks/av/services/mediaresourcemanager",
@@ -81,6 +84,9 @@
         "libactivitymanager_aidl",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
+    ],
     include_dirs: [
         "frameworks/av/include",
         "frameworks/av/services/mediaresourcemanager",
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 5b4fca9..d663f37 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -205,6 +205,8 @@
           "sample_rate=%u, channel_mask=%#x, device=%d",
           __func__, config->format, config->sample_rate,
           config->channel_mask, deviceId);
+
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
                                                                 &attributes,
                                                                 config,
@@ -246,7 +248,7 @@
           __func__, config->format, getDeviceId(), getSessionId());
 
     // Create MMAP/NOIRQ buffer.
-    result = createMmapBuffer();
+    result = createMmapBuffer_l();
     if (result != AAUDIO_OK) {
         goto error;
     }
@@ -283,7 +285,7 @@
     return result;
 
 error:
-    close();
+    close_l();
     // restore original requests
     setDeviceId(mRequestedDeviceId);
     setSessionId(requestedSessionId);
@@ -291,13 +293,28 @@
 }
 
 void AAudioServiceEndpointMMAP::close() {
-    if (mMmapStream != nullptr) {
-        // Needs to be explicitly cleared or CTS will fail but it is not clear why.
-        mMmapStream.clear();
+    bool closedIt = false;
+    {
+        const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+        closedIt = close_l();
+    }
+    if (closedIt) {
+        // TODO Why is this needed?
         AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
     }
 }
 
+bool AAudioServiceEndpointMMAP::close_l() { // requires mMmapStreamLock
+    bool closedIt = false;
+    if (mMmapStream != nullptr) {
+        // Needs to be explicitly cleared or CTS will fail but it is not clear why.
+        ALOGD("%s() clear mMmapStream", __func__);
+        mMmapStream.clear();
+        closedIt = true;
+    }
+    return closedIt;
+}
+
 aaudio_result_t AAudioServiceEndpointMMAP::startStream(sp<AAudioServiceStreamBase> stream,
                                                    audio_port_handle_t *clientHandle __unused) {
     // Start the client on behalf of the AAudio service.
@@ -318,7 +335,7 @@
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::stopStream(sp<AAudioServiceStreamBase> /*stream*/,
-                                                      audio_port_handle_t /*clientHandle*/) {
+                                                      audio_port_handle_t clientHandle) {
     mFramesTransferred.reset32();
 
     // Round 64-bit counter up to a multiple of the buffer capacity.
@@ -328,36 +345,68 @@
     mFramesTransferred.roundUp64(getBufferCapacity());
 
     // Use the port handle that was provided by openMmapStream().
-    ALOGV("%s() mPortHandle = %d", __func__, mPortHandle);
-    return stopClient(mPortHandle);
+    aaudio_result_t result = stopClient(mPortHandle);
+    ALOGD("%s(%d): called stopClient(%d=mPortHandle), returning %d", __func__,
+          (int)clientHandle, mPortHandle, result);
+    return result;
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::startClient(const android::AudioClient& client,
                                                        const audio_attributes_t *attr,
-                                                       audio_port_handle_t *clientHandle) {
-    return mMmapStream == nullptr
-            ? AAUDIO_ERROR_NULL
-            : AAudioConvert_androidToAAudioResult(mMmapStream->start(client, attr, clientHandle));
+                                                       audio_port_handle_t *portHandlePtr) {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    } else if (!isConnected()) {
+        ALOGD("%s(): MMAP stream was disconnected", __func__);
+        return AAUDIO_ERROR_DISCONNECTED;
+    } else {
+        aaudio_result_t result = AAudioConvert_androidToAAudioResult(
+                mMmapStream->start(client, attr, portHandlePtr));
+        if (!isConnected() && (portHandlePtr != nullptr)) {
+            ALOGD("%s(): MMAP stream DISCONNECTED after starting port %d, will stop it",
+                  __func__, *portHandlePtr);
+            mMmapStream->stop(*portHandlePtr);
+            *portHandlePtr = AUDIO_PORT_HANDLE_NONE;
+            result = AAUDIO_ERROR_DISCONNECTED;
+        }
+        ALOGD("%s(): returning port %d, result %d", __func__,
+              (portHandlePtr == nullptr) ? -1 : *portHandlePtr, result);
+        return result;
+    }
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::stopClient(audio_port_handle_t clientHandle) {
-    return mMmapStream == nullptr
-            ? AAUDIO_ERROR_NULL
-            : AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
+aaudio_result_t AAudioServiceEndpointMMAP::stopClient(audio_port_handle_t portHandle) {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+    if (mMmapStream == nullptr) {
+        ALOGE("%s(%d): called after mMmapStream set to NULL", __func__, (int)portHandle);
+        return AAUDIO_ERROR_NULL;
+    } else {
+        aaudio_result_t result = AAudioConvert_androidToAAudioResult(
+                mMmapStream->stop(portHandle));
+        ALOGD("%s(%d): returning %d", __func__, (int)portHandle, result);
+        return result;
+    }
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::standby() {
-    return mMmapStream == nullptr
-            ? AAUDIO_ERROR_NULL
-            : AAudioConvert_androidToAAudioResult(mMmapStream->standby());
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    } else {
+        return AAudioConvert_androidToAAudioResult(mMmapStream->standby());
+    }
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::exitStandby(AudioEndpointParcelable* parcelable) {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     if (mMmapStream == nullptr) {
         return AAUDIO_ERROR_NULL;
     }
     mAudioDataWrapper->reset();
-    const aaudio_result_t result = createMmapBuffer();
+    const aaudio_result_t result = createMmapBuffer_l();
     if (result == AAUDIO_OK) {
         getDownDataDescription(parcelable);
     }
@@ -367,10 +416,12 @@
 // Get free-running DSP or DMA hardware position from the HAL.
 aaudio_result_t AAudioServiceEndpointMMAP::getFreeRunningPosition(int64_t *positionFrames,
                                                                 int64_t *timeNanos) {
-    struct audio_mmap_position position;
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
         return AAUDIO_ERROR_NULL;
     }
+    struct audio_mmap_position position;
     const status_t status = mMmapStream->getMmapPosition(&position);
     ALOGV("%s() status= %d, pos = %d, nanos = %lld\n",
           __func__, status, position.position_frames, (long long) position.time_nanoseconds);
@@ -475,9 +526,14 @@
 aaudio_result_t AAudioServiceEndpointMMAP::getExternalPosition(uint64_t *positionFrames,
                                                                int64_t *timeNanos)
 {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     if (mHalExternalPositionStatus != AAUDIO_OK) {
         return mHalExternalPositionStatus;
     }
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    }
     uint64_t tempPositionFrames;
     int64_t tempTimeNanos;
     const status_t status = mMmapStream->getExternalPosition(&tempPositionFrames, &tempTimeNanos);
@@ -552,13 +608,20 @@
     return mHalExternalPositionStatus;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer()
+// mMmapStreamLock should be held when calling this function.
+aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer_l()
 {
     memset(&mMmapBufferinfo, 0, sizeof(struct audio_mmap_buffer_info));
     int32_t minSizeFrames = getBufferCapacity();
     if (minSizeFrames <= 0) { // zero will get rejected
         minSizeFrames = AAUDIO_BUFFER_CAPACITY_MIN;
     }
+
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    }
+
     const status_t status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
     const bool isBufferShareable = mMmapBufferinfo.flags & AUDIO_MMAP_APPLICATION_SHAREABLE;
     if (status != OK) {
@@ -598,6 +661,7 @@
     // Call to HAL to make sure the transport FD was able to be closed by binder.
     // This is a tricky workaround for a problem in Binder.
     // TODO:[b/192048842] When that problem is fixed we may be able to remove or change this code.
+    ALOGD("%s() - call getMmapPosition() as a hack to clear FD stuck in Binder", __func__);
     struct audio_mmap_position position;
     mMmapStream->getMmapPosition(&position);
 
@@ -613,11 +677,14 @@
 }
 
 void AAudioServiceEndpointMMAP::reportData() {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+
     if (mMmapStream == nullptr) {
         // This must not happen
         ALOGE("%s() invalid state, mmap stream is not initialized", __func__);
         return;
     }
+
     auto fifo = mAudioDataWrapper->getFifoBuffer();
     if (fifo == nullptr) {
         ALOGE("%s() fifo buffer is not initialized, cannot report data", __func__);
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index eaa578c..962d390 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -50,7 +50,7 @@
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-    void close() override;
+    void close() override EXCLUDES(mMmapStreamLock);
 
     aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
                                 audio_port_handle_t *clientHandle) override;
@@ -60,15 +60,19 @@
 
     aaudio_result_t startClient(const android::AudioClient& client,
                                 const audio_attributes_t *attr,
-                                audio_port_handle_t *clientHandle)  override;
+                                audio_port_handle_t *clientHandle)  override
+                                EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t stopClient(audio_port_handle_t clientHandle)  override;
+    aaudio_result_t stopClient(audio_port_handle_t clientHandle)  override
+            EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t standby() override;
+    aaudio_result_t standby() override EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) override;
+    aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) override
+            EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override
+             EXCLUDES(mMmapStreamLock);
 
     aaudio_result_t getTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
 
@@ -88,22 +92,31 @@
         return mHardwareTimeOffsetNanos;
     }
 
-    aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos);
+    aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos)
+            EXCLUDES(mMmapStreamLock);
 
-    int64_t nextDataReportTime();
+    int64_t nextDataReportTime() EXCLUDES(mMmapStreamLock);
 
-    void reportData();
+    void reportData() EXCLUDES(mMmapStreamLock);
 
 private:
 
-    aaudio_result_t openWithConfig(audio_config_base_t* config);
+    /**
+     *
+     * @return true if mMmapStream was cleared
+     */
+    bool close_l() REQUIRES(mMmapStreamLock);
 
-    aaudio_result_t createMmapBuffer();
+    aaudio_result_t openWithConfig(audio_config_base_t* config) EXCLUDES(mMmapStreamLock);
+
+    aaudio_result_t createMmapBuffer_l() REQUIRES(mMmapStreamLock);
 
     MonotonicCounter                          mFramesTransferred;
 
     // Interface to the AudioFlinger MMAP support.
-    android::sp<android::MmapStreamInterface> mMmapStream;
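+    // Guards mMmapStream, which close() may clear while other calls are still in flight.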
+    mutable std::mutex                        mMmapStreamLock;
+    android::sp<android::MmapStreamInterface> mMmapStream GUARDED_BY(mMmapStreamLock);
+
     struct audio_mmap_buffer_info             mMmapBufferinfo;
 
     // There is only one port associated with an MMAP endpoint.
diff --git a/services/oboeservice/AAudioServiceEndpointPlay.cpp b/services/oboeservice/AAudioServiceEndpointPlay.cpp
index 637405d..5d6e2ae 100644
--- a/services/oboeservice/AAudioServiceEndpointPlay.cpp
+++ b/services/oboeservice/AAudioServiceEndpointPlay.cpp
@@ -88,7 +88,8 @@
                 }
 
                 aaudio_stream_state_t state = clientStream->getState();
-                if (state == AAUDIO_STREAM_STATE_STOPPING) {
+                if (state == AAUDIO_STREAM_STATE_STOPPING ||
+                    state == AAUDIO_STREAM_STATE_PAUSING) {
                     allowUnderflow = false; // just read what is already in the FIFO
                 } else if (state != AAUDIO_STREAM_STATE_STARTED) {
                     continue; // this stream is not running so skip it.
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index dc70c79..78cf706 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -22,6 +22,7 @@
 #include <iostream>
 #include <mutex>
 
+#include <com_android_media_aaudio.h>
 #include <media/MediaMetricsItem.h>
 #include <media/TypeConverter.h>
 #include <mediautils/SchedulingPolicyService.h>
@@ -219,13 +220,17 @@
     return closeAndClear();
 }
 
-aaudio_result_t AAudioServiceStreamBase::startDevice() {
+aaudio_result_t AAudioServiceStreamBase::startDevice_l() {
     mClientHandle = AUDIO_PORT_HANDLE_NONE;
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
+    if (!endpoint->isConnected()) {
+        ALOGE("%s() endpoint was already disconnected", __func__);
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     return endpoint->startStream(this, &mClientHandle);
 }
 
@@ -274,7 +279,7 @@
     mAtomicStreamTimestamp.clear();
 
     mClientHandle = AUDIO_PORT_HANDLE_NONE;
-    result = startDevice();
+    result = startDevice_l();
     if (result != AAUDIO_OK) goto error;
 
     // This should happen at the end of the start.
@@ -307,6 +312,8 @@
             .set(AMEDIAMETRICS_PROP_STATUS, (int32_t)result)
             .record(); });
 
+    setState(AAUDIO_STREAM_STATE_PAUSING);
+
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
@@ -331,6 +338,7 @@
 aaudio_result_t AAudioServiceStreamBase::stop_l() {
     aaudio_result_t result = AAUDIO_OK;
     if (!isRunning()) {
+        ALOGW("%s() stream not running, returning early", __func__);
         return result;
     }
     const int64_t beginNs = AudioClock::getNanoseconds();
@@ -395,6 +403,7 @@
 }
 
 // implement Runnable, periodically send timestamps to client and process commands from queue.
+// Enter standby mode if idle for a while.
 __attribute__((no_sanitize("integer")))
 void AAudioServiceStreamBase::run() {
     ALOGD("%s() %s entering >>>>>>>>>>>>>> COMMANDS", __func__, getTypeText());
@@ -403,6 +412,7 @@
     TimestampScheduler timestampScheduler;
     int64_t nextTimestampReportTime;
     int64_t nextDataReportTime;
+    // When to try to enter standby.
     int64_t standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
     // Balance the incStrong from when the thread was launched.
     holdStream->decStrong(nullptr);
@@ -414,28 +424,26 @@
     int32_t loopCount = 0;
     while (mThreadEnabled.load()) {
         loopCount++;
-        int64_t timeoutNanos = -1;
-        if (isDisconnected_l()) {
-            if (!isStandby_l()) {
-                // If the stream is disconnected but not in standby mode, wait until standby time.
+        int64_t timeoutNanos = -1; // wait forever
+        if (isDisconnected_l() || isIdle_l()) {
+            if (isStandbyImplemented() && !isStandby_l()) {
+                // If not in standby mode, wait until standby time.
                 timeoutNanos = standbyTime - AudioClock::getNanoseconds();
                 timeoutNanos = std::max<int64_t>(0, timeoutNanos);
-            } // else {
-                // If the stream is disconnected and in standby mode, keep `timeoutNanos` as
-                // -1 to wait forever until next command as the stream can only be closed.
-            // }
-        } else if (isRunning() || (isIdle_l() && !isStandby_l())) {
-            timeoutNanos = (isRunning() ? std::min(nextTimestampReportTime, nextDataReportTime)
-                                        : standbyTime) - AudioClock::getNanoseconds();
+            }
+            // Otherwise, keep `timeoutNanos` as -1 to wait forever until next command.
+        } else if (isRunning()) {
+            timeoutNanos = std::min(nextTimestampReportTime, nextDataReportTime)
+                    - AudioClock::getNanoseconds();
             timeoutNanos = std::max<int64_t>(0, timeoutNanos);
         }
-
         auto command = mCommandQueue.waitForCommand(timeoutNanos);
         if (!mThreadEnabled) {
             // Break the loop if the thread is disabled.
             break;
         }
 
+        // Is it time to send timestamps?
         if (isRunning() && !isDisconnected_l()) {
             auto currentTimestamp = AudioClock::getNanoseconds();
             if (currentTimestamp >= nextDataReportTime) {
@@ -451,19 +459,24 @@
                 nextTimestampReportTime = timestampScheduler.nextAbsoluteTime();
             }
         }
-        if ((isIdle_l() || isDisconnected_l()) && AudioClock::getNanoseconds() >= standbyTime) {
+
+        // Is it time to enter standby?
+        if ((isIdle_l() || isDisconnected_l())
+                && isStandbyImplemented()
+                && !isStandby_l()
+                && (AudioClock::getNanoseconds() >= standbyTime)) {
+            ALOGD("%s() call standby_l(), %d loops", __func__, loopCount);
             aaudio_result_t result = standby_l();
             if (result != AAUDIO_OK) {
-                // If standby failed because of the function is not implemented, there is no
-                // need to retry. Otherwise, retry standby later.
-                ALOGW("Failed to enter standby, error=%d", result);
-                standbyTime = result == AAUDIO_ERROR_UNIMPLEMENTED
-                        ? std::numeric_limits<int64_t>::max()
-                        : AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
+                ALOGW("Failed to enter standby, error = %d", result);
+                // Try again later.
+                standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
             }
         }
 
         if (command != nullptr) {
+            ALOGD("%s() got COMMAND opcode %d after %d loops",
+                    __func__, command->operationCode, loopCount);
             std::scoped_lock<std::mutex> _commandLock(command->lock);
             switch (command->operationCode) {
                 case START:
@@ -518,6 +531,18 @@
                                                        : exitStandby_l(param->mParcelable);
                     standbyTime = AudioClock::getNanoseconds() + IDLE_TIMEOUT_NANOS;
                 } break;
+                case START_CLIENT: {
+                    auto param = (StartClientParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                       : startClient_l(param->mClient,
+                                                                       param->mAttr,
+                                                                       param->mClientHandle);
+                } break;
+                case STOP_CLIENT: {
+                    auto param = (StopClientParam *) command->parameter.get();
+                    command->result = param == nullptr ? AAUDIO_ERROR_ILLEGAL_ARGUMENT
+                                                       : stopClient_l(param->mClientHandle);
+                } break;
                 default:
                     ALOGE("Invalid command op code: %d", command->operationCode);
                     break;
@@ -730,6 +755,26 @@
     return mCommandQueue.sendCommand(command);
 }
 
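+// Queue a START_CLIENT command; the command thread runs startClient_l() and returns its result.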
+aaudio_result_t AAudioServiceStreamBase::sendStartClientCommand(const android::AudioClient &client,
+                                                                const audio_attributes_t *attr,
+                                                                audio_port_handle_t *clientHandle) {
+    auto command = std::make_shared<AAudioCommand>(
+            START_CLIENT,
+            std::make_shared<StartClientParam>(client, attr, clientHandle),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+    return mCommandQueue.sendCommand(command);
+}
+
+aaudio_result_t AAudioServiceStreamBase::sendStopClientCommand(audio_port_handle_t clientHandle) {
+    auto command = std::make_shared<AAudioCommand>(
+            STOP_CLIENT,
+            std::make_shared<StopClientParam>(clientHandle),
+            true /*waitForReply*/,
+            TIMEOUT_NANOS);
+    return mCommandQueue.sendCommand(command);
+}
+
 void AAudioServiceStreamBase::onVolumeChanged(float volume) {
     sendServiceEvent(AAUDIO_SERVICE_EVENT_VOLUME, volume);
 }
diff --git a/services/oboeservice/AAudioServiceStreamBase.h b/services/oboeservice/AAudioServiceStreamBase.h
index 96a6d44..20737bc 100644
--- a/services/oboeservice/AAudioServiceStreamBase.h
+++ b/services/oboeservice/AAudioServiceStreamBase.h
@@ -279,7 +279,7 @@
      * Device specific startup.
      * @return AAUDIO_OK or negative error.
      */
-    virtual aaudio_result_t startDevice();
+    virtual aaudio_result_t startDevice_l() REQUIRES(mLock);
 
     aaudio_result_t writeUpMessageQueue(AAudioServiceMessage *command)
             EXCLUDES(mUpMessageQueueLock);
@@ -288,6 +288,12 @@
 
     aaudio_result_t sendXRunCount(int32_t xRunCount);
 
+    aaudio_result_t sendStartClientCommand(const android::AudioClient& client,
+                                           const audio_attributes_t *attr,
+                                           audio_port_handle_t *clientHandle) EXCLUDES(mLock);
+
+    aaudio_result_t sendStopClientCommand(audio_port_handle_t clientHandle) EXCLUDES(mLock);
+
     /**
      * @param positionFrames
      * @param timeNanos
@@ -310,9 +316,14 @@
         mDisconnected = flag;
     }
 
+    // If you override this method, please also override isStandbyImplemented().
     virtual aaudio_result_t standby_l() REQUIRES(mLock) {
         return AAUDIO_ERROR_UNIMPLEMENTED;
     }
+    virtual bool isStandbyImplemented() {
+        return false;
+    }
+
     class ExitStandbyParam : public AAudioCommandParam {
     public:
         explicit ExitStandbyParam(AudioEndpointParcelable* parcelable)
@@ -342,6 +353,40 @@
     }
     virtual void reportData_l() REQUIRES(mLock) { return; }
 
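+    // Parameters for a START_CLIENT command, consumed by startClient_l() on the command thread.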
+    class StartClientParam : public AAudioCommandParam {
+    public:
+        StartClientParam(const android::AudioClient& client, const audio_attributes_t* attr,
+                         audio_port_handle_t* clientHandle)
+                : AAudioCommandParam(), mClient(client), mAttr(attr), mClientHandle(clientHandle) {
+        }
+        ~StartClientParam() override = default;
+
+        android::AudioClient mClient;
+        const audio_attributes_t* mAttr;
+        audio_port_handle_t* mClientHandle;
+    };
+    virtual aaudio_result_t startClient_l(
+            const android::AudioClient& client,
+            const audio_attributes_t *attr __unused,
+            audio_port_handle_t *clientHandle __unused) REQUIRES(mLock) {
+        ALOGD("AAudioServiceStreamBase::startClient_l(%p, ...) AAUDIO_ERROR_UNAVAILABLE", &client);
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
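+    // Parameters for a STOP_CLIENT command, consumed by stopClient_l() on the command thread.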
+    class StopClientParam : public AAudioCommandParam {
+    public:
+        explicit StopClientParam(audio_port_handle_t clientHandle)
+                : AAudioCommandParam(), mClientHandle(clientHandle) {
+        }
+        ~StopClientParam() override = default;
+
+        audio_port_handle_t mClientHandle;
+    };
+    virtual aaudio_result_t stopClient_l(audio_port_handle_t clientHandle) REQUIRES(mLock) {
+        ALOGD("AAudioServiceStreamBase::stopClient(%d) AAUDIO_ERROR_UNAVAILABLE", clientHandle);
+        return AAUDIO_ERROR_UNAVAILABLE;
+    }
+
     pid_t                   mRegisteredClientThread = ILLEGAL_THREAD_ID;
 
     std::mutex              mUpMessageQueueLock;
@@ -358,6 +403,8 @@
         UNREGISTER_AUDIO_THREAD,
         GET_DESCRIPTION,
         EXIT_STANDBY,
+        START_CLIENT,
+        STOP_CLIENT,
     };
     AAudioThread            mCommandThread;
     std::atomic_bool        mThreadEnabled{false};
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index 89f6e33..5203e50 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -24,6 +24,7 @@
 #include <iostream>
 #include <stdint.h>
 
+#include <com_android_media_aaudio.h>
 #include <utils/String16.h>
 #include <media/nbaio/AudioStreamOutSink.h>
 #include <media/MmapStreamInterface.h>
@@ -83,11 +84,12 @@
 }
 
 // Start the flow of data.
-aaudio_result_t AAudioServiceStreamMMAP::startDevice() {
-    aaudio_result_t result = AAudioServiceStreamBase::startDevice();
+aaudio_result_t AAudioServiceStreamMMAP::startDevice_l() {
+    aaudio_result_t result = AAudioServiceStreamBase::startDevice_l();
     if (!mInService && result == AAUDIO_OK) {
         // Note that this can sometimes take 200 to 300 msec for a cold start!
-        result = startClient(mMmapClient, nullptr /*const audio_attributes_t* */, &mClientHandle);
+        result = startClient_l(
+                mMmapClient, nullptr /*const audio_attributes_t* */, &mClientHandle);
     }
     return result;
 }
@@ -100,7 +102,7 @@
     aaudio_result_t result = AAudioServiceStreamBase::pause_l();
     // TODO put before base::pause()?
     if (!mInService) {
-        (void) stopClient(mClientHandle);
+        (void) stopClient_l(mClientHandle);
     }
     return result;
 }
@@ -112,7 +114,7 @@
     aaudio_result_t result = AAudioServiceStreamBase::stop_l();
     // TODO put before base::stop()?
     if (!mInService) {
-        (void) stopClient(mClientHandle);
+        (void) stopClient_l(mClientHandle);
     }
     return result;
 }
@@ -148,7 +150,40 @@
 
 aaudio_result_t AAudioServiceStreamMMAP::startClient(const android::AudioClient& client,
                                                      const audio_attributes_t *attr,
-                                                     audio_port_handle_t *clientHandle) {
+                                                     audio_port_handle_t *portHandlePtr) {
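+    // With the aconfig flag enabled, run the start on the command thread via the command queue.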
+    if (com::android::media::aaudio::start_stop_client_from_command_thread()) {
+        return sendStartClientCommand(client, attr, portHandlePtr);
+    } else {
+        sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+        if (endpoint == nullptr) {
+            ALOGE("%s() has no endpoint", __func__);
+            return AAUDIO_ERROR_INVALID_STATE;
+        }
+        // Start the client on behalf of the application. Generate a new port handle.
+        aaudio_result_t result = endpoint->startClient(client, attr, portHandlePtr);
+        ALOGD("%s() flag off, got port %d", __func__,
+              ((portHandlePtr == nullptr) ? -1 : *portHandlePtr));
+        return result;
+    }
+}
+
+aaudio_result_t AAudioServiceStreamMMAP::stopClient(audio_port_handle_t clientHandle) {
+    if (com::android::media::aaudio::start_stop_client_from_command_thread()) {
+        return sendStopClientCommand(clientHandle);
+    } else {
+        sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
+        if (endpoint == nullptr) {
+            ALOGE("%s() has no endpoint", __func__);
+            return AAUDIO_ERROR_INVALID_STATE;
+        }
+        aaudio_result_t result = endpoint->stopClient(clientHandle);
+        return result;
+    }
+}
+
+aaudio_result_t AAudioServiceStreamMMAP::startClient_l(const android::AudioClient& client,
+                                                       const audio_attributes_t *attr,
+                                                       audio_port_handle_t *clientHandle) {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
@@ -159,7 +194,7 @@
     return result;
 }
 
-aaudio_result_t AAudioServiceStreamMMAP::stopClient(audio_port_handle_t clientHandle) {
+aaudio_result_t AAudioServiceStreamMMAP::stopClient_l(audio_port_handle_t clientHandle) {
     sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
     if (endpoint == nullptr) {
         ALOGE("%s() has no endpoint", __func__);
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.h b/services/oboeservice/AAudioServiceStreamMMAP.h
index 42032d7..f20ea10 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.h
+++ b/services/oboeservice/AAudioServiceStreamMMAP.h
@@ -73,6 +73,9 @@
     aaudio_result_t stop_l() REQUIRES(mLock) override;
 
     aaudio_result_t standby_l() REQUIRES(mLock) override;
+    bool isStandbyImplemented() override {
+        return true;
+    }
 
     aaudio_result_t exitStandby_l(AudioEndpointParcelable* parcelable) REQUIRES(mLock) override;
 
@@ -93,7 +96,13 @@
      * Device specific startup.
      * @return AAUDIO_OK or negative error.
      */
-    aaudio_result_t startDevice() override;
+    aaudio_result_t startDevice_l() REQUIRES(mLock) override;
+
+    aaudio_result_t startClient_l(const android::AudioClient& client,
+                                  const audio_attributes_t *attr,
+                                  audio_port_handle_t *clientHandle) REQUIRES(mLock) override;
+
+    aaudio_result_t stopClient_l(audio_port_handle_t clientHandle) REQUIRES(mLock) override;
 
 private:
 
diff --git a/services/oboeservice/Android.bp b/services/oboeservice/Android.bp
index e3601a1..67b319f 100644
--- a/services/oboeservice/Android.bp
+++ b/services/oboeservice/Android.bp
@@ -84,6 +84,7 @@
     shared_libs: [
         "aaudio-aidl-cpp",
         "com.android.media.aaudio-aconfig-cc",
+        "com.android.media.aaudio-aconfig-cc",
         "framework-permission-aidl-cpp",
         "libaaudio_internal",
         "libaudioclient",
@@ -157,6 +158,8 @@
         "frameworks/av/media/libnbaio/include_mono",
     ],
 
+    export_include_dirs: ["."],
+
     tidy: true,
     tidy_checks: tidy_errors,
     tidy_checks_as_errors: tidy_errors,
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index e29d520..5d7bf68 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -15,7 +15,7 @@
     imports: [
         "android.hardware.common-V2",
         "android.hardware.common.fmq-V1",
-        "android.hardware.tv.tuner-V2",
+        "android.hardware.tv.tuner-V3",
     ],
     backend: {
         java: {
@@ -41,7 +41,7 @@
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
-        "android.hardware.tv.tuner-V2-ndk",
+        "android.hardware.tv.tuner-V3-ndk",
         "libbase",
         "libbinder",
         "libbinder_ndk",
@@ -84,7 +84,7 @@
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
-        "android.hardware.tv.tuner-V2-ndk",
+        "android.hardware.tv.tuner-V3-ndk",
         "libbase",
         "libcutils",
         "libbinder",
diff --git a/services/tuner/TunerFilter.cpp b/services/tuner/TunerFilter.cpp
index 84a2b4e..e393c44 100644
--- a/services/tuner/TunerFilter.cpp
+++ b/services/tuner/TunerFilter.cpp
@@ -335,26 +335,40 @@
 
 /////////////// FilterCallback ///////////////////////
 ::ndk::ScopedAStatus TunerFilter::FilterCallback::onFilterStatus(DemuxFilterStatus status) {
-    Mutex::Autolock _l(mCallbackLock);
-    if (mTunerFilterCallback != nullptr) {
-        mTunerFilterCallback->onFilterStatus(status);
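+    // Copy the callback under the lock, then invoke it without holding mCallbackLock.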
+    shared_ptr<ITunerFilterCallback> cb(nullptr);
+    {
+        Mutex::Autolock _l(mCallbackLock);
+        cb = mTunerFilterCallback;
+    }
+    if (cb != nullptr) {
+        cb->onFilterStatus(status);
     }
     return ::ndk::ScopedAStatus::ok();
 }
 
 ::ndk::ScopedAStatus TunerFilter::FilterCallback::onFilterEvent(
         const vector<DemuxFilterEvent>& events) {
-    Mutex::Autolock _l(mCallbackLock);
-    if (mTunerFilterCallback != nullptr) {
-        mTunerFilterCallback->onFilterEvent(events);
+    shared_ptr<ITunerFilterCallback> cb(nullptr);
+    {
+        Mutex::Autolock _l(mCallbackLock);
+        cb = mTunerFilterCallback;
+    }
+    if (cb != nullptr) {
+        cb->onFilterEvent(events);
     }
     return ::ndk::ScopedAStatus::ok();
 }
 
 void TunerFilter::FilterCallback::sendSharedFilterStatus(int32_t status) {
-    Mutex::Autolock _l(mCallbackLock);
-    if (mTunerFilterCallback != nullptr && mOriginalCallback != nullptr) {
-        mTunerFilterCallback->onFilterStatus(static_cast<DemuxFilterStatus>(status));
+    shared_ptr<ITunerFilterCallback> cb(nullptr);
+    shared_ptr<ITunerFilterCallback> orig_cb(nullptr);
+    {
+        Mutex::Autolock _l(mCallbackLock);
+        cb = mTunerFilterCallback;
+        orig_cb = mOriginalCallback;
+    }
+    if (cb != nullptr && orig_cb != nullptr) {
+        cb->onFilterStatus(static_cast<DemuxFilterStatus>(status));
     }
 }
 
diff --git a/tools/mainline_hook_partial.sh b/tools/mainline_hook_partial.sh
index 63ae4c0..978dc02 100755
--- a/tools/mainline_hook_partial.sh
+++ b/tools/mainline_hook_partial.sh
Binary files differ
diff --git a/tools/mainline_hook_project.sh b/tools/mainline_hook_project.sh
index d58143e..65f4073 100755
--- a/tools/mainline_hook_project.sh
+++ b/tools/mainline_hook_project.sh
@@ -16,8 +16,9 @@
 
 
 # tunables
+# as of 2024/5, things are all on the same branch
 DEV_BRANCH=main
-MAINLINE_BRANCH=udc-mainline-prod
+MAINLINE_BRANCH=main
 
 ###
 RED=$(tput setaf 1)