Merge "Add README and documentation to VirtualCamera" into main
diff --git a/Android.bp b/Android.bp
index c11e38a..0c7ed6e 100644
--- a/Android.bp
+++ b/Android.bp
@@ -160,11 +160,11 @@
             imports: ["android.hardware.audio.core-V2"],
         },
     ],
-    frozen: true,
+    frozen: false,
 
 }
 
-latest_av_audio_types_aidl = "av-audio-types-aidl-V1"
+latest_av_audio_types_aidl = "av-audio-types-aidl-V2"
 
 cc_defaults {
     name: "latest_av_audio_types_aidl_ndk_shared",
diff --git a/aidl/com/android/media/permission/PermissionEnum.aidl b/aidl/com/android/media/permission/PermissionEnum.aidl
index 834a275..b08db44 100644
--- a/aidl/com/android/media/permission/PermissionEnum.aidl
+++ b/aidl/com/android/media/permission/PermissionEnum.aidl
@@ -21,11 +21,21 @@
  * {@hide}
  */
 enum PermissionEnum {
-    MODIFY_AUDIO_ROUTING = 0,
-    MODIFY_PHONE_STATE = 1,
-    CALL_AUDIO_INTERCEPTION = 2,
     // This is a runtime + WIU permission, which means data delivery should be protected by AppOps
     // We query the controller only for early fails/hard errors
-    RECORD_AUDIO = 3,
-    ENUM_SIZE = 4, // Not for actual usage
+    RECORD_AUDIO = 0,
+    MODIFY_AUDIO_ROUTING = 1,
+    MODIFY_AUDIO_SETTINGS = 2,
+    MODIFY_PHONE_STATE = 3,
+    MODIFY_DEFAULT_AUDIO_EFFECTS = 4,
+    WRITE_SECURE_SETTINGS = 5,
+    CALL_AUDIO_INTERCEPTION = 6,
+    ACCESS_ULTRASOUND = 7,
+    CAPTURE_AUDIO_OUTPUT = 8,
+    CAPTURE_MEDIA_OUTPUT = 9,
+    CAPTURE_AUDIO_HOTWORD = 10,
+    CAPTURE_TUNER_AUDIO_INPUT = 11,
+    CAPTURE_VOICE_COMMUNICATION_OUTPUT = 12,
+    BLUETOOTH_CONNECT = 13,
+    ENUM_SIZE = 14, // Not for actual usage, used by Java
 }
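The reordered PermissionEnum keeps ENUM_SIZE as a sentinel one past the last real value, so callers can size per-permission tables without hardcoding a count. A minimal C++ sketch of that pattern, using a hand-written mirror of the enum rather than the AIDL-generated binding:

    #include <array>
    #include <cstddef>
    #include <cstdint>

    // Illustrative mirror of the AIDL enum above; real code uses the generated
    // binding. ENUM_SIZE is a sentinel, not an actual permission.
    enum class PermissionEnum : int32_t {
        RECORD_AUDIO = 0,
        MODIFY_AUDIO_ROUTING = 1,
        // ... remaining values as declared above ...
        ENUM_SIZE = 14,
    };

    struct PermissionCache {
        // One cached grant bit per permission, indexed by enum value.
        std::array<bool, static_cast<std::size_t>(PermissionEnum::ENUM_SIZE)> granted{};

        bool isGranted(PermissionEnum p) const {
            return granted[static_cast<std::size_t>(p)];
        }
    };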
diff --git a/camera/Android.bp b/camera/Android.bp
index 75c2999..d91fcb2 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -75,6 +75,7 @@
         local_include_dirs: ["aidl"],
         include_dirs: [
             "frameworks/native/aidl/gui",
+            "frameworks/native/libs/permission/aidl",
         ],
     },
 
@@ -86,54 +87,57 @@
 
         // Source for camera interface parcelables, and manually-written interfaces
         "Camera.cpp",
+        "CameraBase.cpp",
         "CameraMetadata.cpp",
         "CameraParameters.cpp",
-        "CaptureResult.cpp",
         "CameraParameters2.cpp",
         "CameraSessionStats.cpp",
+        "CameraUtils.cpp",
+        "CaptureResult.cpp",
         "ICamera.cpp",
         "ICameraClient.cpp",
         "ICameraRecordingProxy.cpp",
+        "VendorTagDescriptor.cpp",
         "camera2/CaptureRequest.cpp",
         "camera2/ConcurrentCamera.cpp",
         "camera2/OutputConfiguration.cpp",
         "camera2/SessionConfiguration.cpp",
         "camera2/SubmitInfo.cpp",
-        "CameraBase.cpp",
-        "CameraUtils.cpp",
-        "VendorTagDescriptor.cpp",
     ],
 
     shared_libs: [
         "camera_platform_flags_c_lib",
-        "libbase",
-        "libcutils",
-        "libutils",
-        "liblog",
-        "libbinder",
-        "libgui",
-        "libcamera_metadata",
-        "libnativewindow",
+        "framework-permission-aidl-cpp",
         "lib-platform-compat-native-api",
+        "libbase",
+        "libbinder",
+        "libcamera_metadata",
+        "libcutils",
+        "libgui",
+        "liblog",
+        "libnativewindow",
+        "libpermission",
+        "libutils",
     ],
 
     include_dirs: [
-        "system/media/private/camera/include",
         "frameworks/native/include/media/openmax",
+        "system/media/private/camera/include",
     ],
     export_include_dirs: [
         "include",
         "include/camera",
     ],
     export_shared_lib_headers: [
+        "framework-permission-aidl-cpp",
         "libcamera_metadata",
-        "libnativewindow",
         "libgui",
+        "libnativewindow",
     ],
 
     cflags: [
-        "-Werror",
         "-Wall",
+        "-Werror",
         "-Wextra",
     ],
 
@@ -153,8 +157,8 @@
     ],
 
     include_dirs: [
-        "system/media/private/camera/include",
         "frameworks/native/include/media/openmax",
+        "system/media/private/camera/include",
     ],
 
     export_include_dirs: [
@@ -168,6 +172,7 @@
     name: "libcamera_client_aidl",
     srcs: [
         "aidl/android/hardware/CameraExtensionSessionStats.aidl",
+        "aidl/android/hardware/CameraFeatureCombinationStats.aidl",
         "aidl/android/hardware/ICameraService.aidl",
         "aidl/android/hardware/ICameraServiceListener.aidl",
         "aidl/android/hardware/ICameraServiceProxy.aidl",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 8018390..d90f7c9 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -69,13 +69,12 @@
     // deadlock if we call any method of ICamera here.
 }
 
-sp<Camera> Camera::connect(int cameraId, const std::string& clientPackageName,
-        int clientUid, int clientPid, int targetSdkVersion, int rotationOverride,
-        bool forceSlowJpegMode, int32_t deviceId, int32_t devicePolicy)
+sp<Camera> Camera::connect(int cameraId, int targetSdkVersion, int rotationOverride,
+        bool forceSlowJpegMode, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy)
 {
-    return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
-            devicePolicy);
+    return CameraBaseT::connect(cameraId, targetSdkVersion, rotationOverride,
+            forceSlowJpegMode, clientAttribution, devicePolicy);
 }
 
 status_t Camera::reconnect()
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index d7415a3..774db25 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -161,10 +161,10 @@
 
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
-                                               const std::string& clientPackageName,
-                                               int clientUid, int clientPid, int targetSdkVersion,
-                                               int rotationOverride, bool forceSlowJpegMode,
-                                               int32_t deviceId, int32_t devicePolicy)
+                                               int targetSdkVersion, int rotationOverride,
+                                               bool forceSlowJpegMode,
+                                               const AttributionSourceState& clientAttribution,
+                                               int32_t devicePolicy)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -176,9 +176,9 @@
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
         ALOGI("Connect camera (legacy API) - rotationOverride %d, forceSlowJpegMode %d",
                 rotationOverride, forceSlowJpegMode);
-        ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
-                devicePolicy, /*out*/ &c->mCamera);
+        ret = (cs.get()->*fnConnectService)(cl, cameraId, targetSdkVersion,
+                rotationOverride, forceSlowJpegMode, clientAttribution, devicePolicy,
+                /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -257,7 +257,8 @@
 }
 
 template <typename TCam, typename TCamTraits>
-int CameraBase<TCam, TCamTraits>::getNumberOfCameras(int32_t deviceId, int32_t devicePolicy) {
+int CameraBase<TCam, TCamTraits>::getNumberOfCameras(
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
 
     if (!cs.get()) {
@@ -266,7 +267,7 @@
     }
     int32_t count;
     binder::Status res = cs->getNumberOfCameras(
-            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, deviceId,
+            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, clientAttribution,
             devicePolicy, &count);
     if (!res.isOk()) {
         ALOGE("Error reading number of cameras: %s",
@@ -279,12 +280,12 @@
 // this can be in BaseCamera but it should be an instance method
 template <typename TCam, typename TCamTraits>
 status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
-        int rotationOverride, int32_t deviceId, int32_t devicePolicy,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         struct hardware::CameraInfo* cameraInfo) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
     if (cs == 0) return UNKNOWN_ERROR;
-    binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, deviceId, devicePolicy,
-            cameraInfo);
+    binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, clientAttribution,
+            devicePolicy, cameraInfo);
     return res.isOk() ? OK : res.serviceSpecificErrorCode();
 }
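These hunks fold the old clientPackageName/clientUid/clientPid/deviceId arguments into a single AttributionSourceState. A hedged sketch of a native caller on the new Camera::connect() signature; the field choices mirror the ACameraManager and test updates later in this change, and the empty package name relies on the service resolving it from the UID:

    #include <android/content/AttributionSourceState.h>
    #include <android/hardware/ICameraService.h>
    #include <camera/Camera.h>

    android::sp<android::Camera> connectCompat(int cameraId, int targetSdkVersion,
                                               int32_t deviceId) {
        android::content::AttributionSourceState clientAttribution;
        clientAttribution.uid = android::hardware::ICameraService::USE_CALLING_UID;
        clientAttribution.pid = android::hardware::ICameraService::USE_CALLING_PID;
        clientAttribution.deviceId = deviceId;
        clientAttribution.packageName = "";  // let the service derive it from the UID

        return android::Camera::connect(
                cameraId, targetSdkVersion,
                android::hardware::ICameraService::ROTATION_OVERRIDE_NONE,
                /*forceSlowJpegMode*/ false, clientAttribution, /*devicePolicy*/ 0);
    }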
 
diff --git a/camera/aidl/android/hardware/CameraFeatureCombinationStats.aidl b/camera/aidl/android/hardware/CameraFeatureCombinationStats.aidl
new file mode 100644
index 0000000..f4a11b1
--- /dev/null
+++ b/camera/aidl/android/hardware/CameraFeatureCombinationStats.aidl
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware;
+
+/**
+ * {@hide}
+ */
+parcelable CameraFeatureCombinationStats {
+    /**
+     * Values for feature combination queries
+     */
+    const long CAMERA_FEATURE_UNKNOWN = 0;
+    const long CAMERA_FEATURE_60_FPS = 1 << 0;
+    const long CAMERA_FEATURE_STABILIZATION = 1 << 1;
+    const long CAMERA_FEATURE_HLG10 = 1 << 2;
+    const long CAMERA_FEATURE_JPEG = 1 << 3;
+    const long CAMERA_FEATURE_JPEG_R = 1 << 4;
+    const long CAMERA_FEATURE_4K = 1 << 5;
+
+    /**
+     * Values for notifyFeatureCombinationStats type
+     */
+    enum QueryType {
+        QUERY_FEATURE_COMBINATION = 0,
+        QUERY_SESSION_CHARACTERISTICS = 1,
+    }
+
+    @utf8InCpp String mCameraId;
+    int mUid;
+    long mFeatureCombination;
+    int mQueryType;
+    int mStatus;
+}
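The CAMERA_FEATURE_* constants are bit flags, so a single mFeatureCombination value records every feature named in one query. A short self-contained sketch using local constexpr mirrors of the constants above (not the generated binding):

    #include <cstdint>

    constexpr int64_t CAMERA_FEATURE_60_FPS        = 1LL << 0;
    constexpr int64_t CAMERA_FEATURE_STABILIZATION = 1LL << 1;
    constexpr int64_t CAMERA_FEATURE_HLG10         = 1LL << 2;

    // A query for 60fps HLG10 recording would be reported with both bits set.
    constexpr int64_t kQueriedFeatures = CAMERA_FEATURE_60_FPS | CAMERA_FEATURE_HLG10;
    static_assert(kQueriedFeatures == 0b101, "independent bits, no overlap");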
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index d9a0934..ce6c2d3 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -16,6 +16,7 @@
 
 package android.hardware;
 
+import android.content.AttributionSourceState;
 import android.hardware.ICamera;
 import android.hardware.ICameraClient;
 import android.hardware.camera2.ICameraDeviceUser;
@@ -66,13 +67,13 @@
      *
      * @param type The type of the camera, can be either CAMERA_TYPE_BACKWARD_COMPATIBLE
      *        or CAMERA_TYPE_ALL.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      */
-    int getNumberOfCameras(int type, int deviceId, int devicePolicy);
+    int getNumberOfCameras(int type, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * If changed, reflect in
@@ -97,19 +98,20 @@
      *        will override the sensor orientation and rotate and crop, while {@link
      *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
      *        without changing the sensor orientation.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      * @return CameraInfo for the camera.
      */
-    CameraInfo getCameraInfo(int cameraId, int rotationOverride, int deviceId,
-            int devicePolicy);
+    CameraInfo getCameraInfo(int cameraId, int rotationOverride,
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
-     * Default UID/PID values for non-privileged callers of
-     * connect() and connectDevice()
+     * Default UID/PID values for non-privileged callers of connect() and connectDevice(). These
+     * can be assigned to the pid/uid fields of AttributionSourceState to indicate that the
+     * calling uid/pid should be used.
      */
     const int USE_CALLING_UID = -1;
     const int USE_CALLING_PID = -1;
@@ -118,9 +120,6 @@
      * Open a camera device through the old camera API.
      *
      * @param cameraId The ID of the camera to open.
-     * @param opPackageName The package name to report for the app-ops.
-     * @param clientUid UID for the calling client.
-     * @param clientPid PID for the calling client.
      * @param targetSdkVersion the target sdk level of the application calling this function.
      * @param rotationOverride Whether to override the sensor orientation information to
      *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
@@ -128,7 +127,7 @@
      *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
      *        without changing the sensor orientation.
      * @param forceSlowJpegMode Whether to force slow jpeg mode.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -136,12 +135,10 @@
      */
     ICamera connect(ICameraClient client,
             int cameraId,
-            @utf8InCpp String opPackageName,
-            int clientUid, int clientPid,
             int targetSdkVersion,
             int rotationOverride,
             boolean forceSlowJpegMode,
-            int deviceId,
+            in AttributionSourceState clientAttribution,
             int devicePolicy);
 
     /**
@@ -149,15 +146,13 @@
      * Only supported for device HAL versions >= 3.2.
      *
      * @param cameraId The ID of the camera to open.
-     * @param opPackageName The package name to report for the app-ops.
-     * @param clientUid UID for the calling client.
      * @param targetSdkVersion the target sdk level of the application calling this function.
      * @param rotationOverride Whether to override the sensor orientation information to
      *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
      *        will override the sensor orientation and rotate and crop, while {@link
      *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
      *        without changing the sensor orientation.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -165,12 +160,10 @@
      */
     ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
             @utf8InCpp String cameraId,
-            @utf8InCpp String opPackageName,
-            @nullable @utf8InCpp String featureId,
-            int clientUid, int oomScoreOffset,
+            int oomScoreOffset,
             int targetSdkVersion,
             int rotationOverride,
-            int deviceId,
+            in AttributionSourceState clientAttribution,
             int devicePolicy);
 
     /**
@@ -194,7 +187,7 @@
      *
      * @param sessions the set of camera id and session configuration pairs to be queried.
      * @param targetSdkVersion the target sdk level of the application calling this function.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -206,7 +199,7 @@
      */
     boolean isConcurrentSessionConfigurationSupported(
             in CameraIdAndSessionConfiguration[] sessions,
-            int targetSdkVersion, int deviceId, int devicePolicy);
+            int targetSdkVersion, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Inject Session Params into an existing camera session.
@@ -236,7 +229,7 @@
      *        will override the sensor orientation and rotate and crop, while {@link
      *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
      *        without changing the sensor orientation.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -244,7 +237,7 @@
      * @return Characteristics for the given camera.
      */
     CameraMetadataNative getCameraCharacteristics(@utf8InCpp String cameraId, int targetSdkVersion,
-            int rotationOverride, int deviceId, int devicePolicy);
+            int rotationOverride, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
@@ -284,14 +277,14 @@
      * Set the torch mode for a camera device.
      *
      * @param cameraId The ID of the camera to set torch mode for.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      */
     void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder,
-            int deviceId, int devicePolicy);
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Change the brightness level of the flash unit associated with cameraId to strengthLevel.
@@ -299,27 +292,28 @@
      *
      * @param cameraId The ID of the camera.
      * @param strengthLevel The torch strength level to set for the camera.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      */
     void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel,
-            IBinder clientBinder, int deviceId, int devicePolicy);
+            IBinder clientBinder, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Get the brightness level of the flash unit associated with cameraId.
      *
      * @param cameraId The ID of the camera.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      * @return Torch strength level for the camera.
      */
-    int getTorchStrengthLevel(@utf8InCpp String cameraId, int deviceId, int devicePolicy);
+    int getTorchStrengthLevel(@utf8InCpp String cameraId,
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Notify the camera service of a system event.  Should only be called from system_server.
@@ -385,7 +379,7 @@
      *
      * @param cameraId The camera id to create the CaptureRequest for.
      * @param templateId The template id create the CaptureRequest for.
-     * @param deviceId the device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -393,7 +387,7 @@
      * @return Metadata representing the CaptureRequest.
      */
     CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId,
-            int deviceId, int devicePolicy);
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Check whether a particular session configuration with optional session parameters
@@ -402,7 +396,7 @@
      * @param cameraId The camera id to query session configuration for
      * @param targetSdkVersion the target sdk level of the application calling this function.
      * @param sessionConfiguration Specific session configuration to be verified.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -412,7 +406,7 @@
      */
     boolean isSessionConfigurationWithParametersSupported(@utf8InCpp String cameraId,
             int targetSdkVersion, in SessionConfiguration sessionConfiguration,
-            int deviceId, int devicePolicy);
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Get the camera characteristics for a particular session configuration for
@@ -427,7 +421,7 @@
      *        without changing the sensor orientation.
      * @param sessionConfiguration Session configuration for which the characteristics
      *                             must be fetched.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -438,6 +432,6 @@
             int targetSdkVersion,
             int rotationOverride,
             in SessionConfiguration sessionConfiguration,
-            int deviceId,
+            in AttributionSourceState clientAttribution,
             int devicePolicy);
 }
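Every reworked ICameraService method now takes the caller identity as an AttributionSourceState instead of a bare deviceId (plus, on the connect paths, the package name and uid/pid). A minimal sketch of a native caller using the new getNumberOfCameras(); the uid/pid defaults follow the USE_CALLING_* convention documented above, and the helper name is illustrative:

    #include <android/content/AttributionSourceState.h>
    #include <android/hardware/ICameraService.h>

    int32_t countCameras(const android::sp<android::hardware::ICameraService>& service,
                         int32_t deviceId) {
        android::content::AttributionSourceState clientAttribution;
        clientAttribution.uid = android::hardware::ICameraService::USE_CALLING_UID;
        clientAttribution.pid = android::hardware::ICameraService::USE_CALLING_PID;
        clientAttribution.deviceId = deviceId;

        int32_t numCameras = 0;
        android::binder::Status status = service->getNumberOfCameras(
                android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
                clientAttribution, /*devicePolicy*/ 0, &numCameras);
        return status.isOk() ? numCameras : 0;
    }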
diff --git a/camera/aidl/android/hardware/ICameraServiceProxy.aidl b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
index dcd69b0..887a68b 100644
--- a/camera/aidl/android/hardware/ICameraServiceProxy.aidl
+++ b/camera/aidl/android/hardware/ICameraServiceProxy.aidl
@@ -16,6 +16,7 @@
 
 package android.hardware;
 
+import android.hardware.CameraFeatureCombinationStats;
 import android.hardware.CameraSessionStats;
 import android.hardware.CameraExtensionSessionStats;
 
@@ -38,6 +39,12 @@
     oneway void notifyCameraState(in CameraSessionStats cameraSessionStats);
 
     /**
+     * Notify of a feature combination query for a camera device.
+     */
+    oneway void notifyFeatureCombinationStats(
+            in CameraFeatureCombinationStats cameraFeatureCombinationStats);
+
+    /**
      * Returns the necessary rotate and crop override for the top activity which
      * will be one of ({@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_NONE},
      * {@link android.hardware.camera2.CameraMetadata#SCALER_ROTATE_AND_CROP_90},
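The new notifyFeatureCombinationStats() gives cameraserver a oneway path for reporting feature-combination queries to the proxy in system_server. A hedged sketch of the reporting side, assuming the generated C++ binding keeps the AIDL field names as public members; the helper name, bit values, and status convention are illustrative:

    #include <string>

    #include <android/hardware/CameraFeatureCombinationStats.h>
    #include <android/hardware/ICameraServiceProxy.h>

    void reportFeatureQuery(const android::sp<android::hardware::ICameraServiceProxy>& proxy,
                            const std::string& cameraId, int32_t clientUid) {
        android::hardware::CameraFeatureCombinationStats stats;
        stats.mCameraId = cameraId;
        stats.mUid = clientUid;
        stats.mFeatureCombination = (1LL << 0) /* CAMERA_FEATURE_60_FPS */ |
                                    (1LL << 2) /* CAMERA_FEATURE_HLG10 */;
        stats.mQueryType = 0;  // QueryType::QUERY_FEATURE_COMBINATION
        stats.mStatus = 0;     // assumed: 0 == query answered successfully
        proxy->notifyFeatureCombinationStats(stats);  // oneway, returns immediately
    }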
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 3ecd10d..646b139 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -59,7 +59,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status (::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const std::string&, int, int, int, int, bool, int32_t, int32_t,
+        int, int, int, bool, const AttributionSourceState&, int32_t,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -81,10 +81,9 @@
             // construct a camera client from an existing remote
     static  sp<Camera>  create(const sp<::android::hardware::ICamera>& camera);
     static  sp<Camera>  connect(int cameraId,
-                                const std::string& clientPackageName,
-                                int clientUid, int clientPid, int targetSdkVersion,
-                                int rotationOverride, bool forceSlowJpegMode,
-                                int32_t deviceId = kDefaultDeviceId, int32_t devicePolicy = 0);
+                                int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+                                const AttributionSourceState& clientAttribution,
+                                int32_t devicePolicy = 0);
 
             virtual     ~Camera();
 
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 3370b3d..d98abe4 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_HARDWARE_CAMERA_BASE_H
 #define ANDROID_HARDWARE_CAMERA_BASE_H
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/ICameraServiceListener.h>
 
 #include <utils/Mutex.h>
@@ -107,6 +108,7 @@
 
 } // namespace hardware
 
+using content::AttributionSourceState;
 using hardware::CameraInfo;
 
 template <typename TCam>
@@ -123,19 +125,19 @@
     typedef typename TCamTraits::TCamConnectService TCamConnectService;
 
     static sp<TCam>      connect(int cameraId,
-                                 const std::string& clientPackageName,
-                                 int clientUid, int clientPid, int targetSdkVersion,
-                                 int rotationOverride, bool forceSlowJpegMode,
-                                 int32_t deviceId, int32_t devicePolicy);
+                                 int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+                                 const AttributionSourceState &clientAttribution,
+                                 int32_t devicePolicy);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
 
-    static int           getNumberOfCameras(int32_t deviceId, int32_t devicePolicy);
+    static int           getNumberOfCameras(const AttributionSourceState& clientAttribution,
+                                            int32_t devicePolicy);
 
     static status_t      getCameraInfo(int cameraId,
                                        int rotationOverride,
-                                       int32_t deviceId,
+                                       const AttributionSourceState& clientAttribution,
                                        int32_t devicePolicy,
                                        /*out*/
                                        struct hardware::CameraInfo* cameraInfo);
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index 5577775..379c0b5 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -66,40 +66,41 @@
 cc_library_shared {
     name: "libcamera2ndk",
     srcs: [
+        "NdkCameraCaptureSession.cpp",
+        "NdkCameraDevice.cpp",
         "NdkCameraManager.cpp",
         "NdkCameraMetadata.cpp",
-        "NdkCameraDevice.cpp",
         "NdkCaptureRequest.cpp",
-        "NdkCameraCaptureSession.cpp",
+        "impl/ACameraCaptureSession.cpp",
+        "impl/ACameraDevice.cpp",
         "impl/ACameraManager.cpp",
         "impl/ACameraMetadata.cpp",
-        "impl/ACameraDevice.cpp",
-        "impl/ACameraCaptureSession.cpp",
     ],
     shared_libs: [
         "android.companion.virtual.virtualdevice_aidl-cpp",
         "android.companion.virtualdevice.flags-aconfig-cc",
-        "libbinder",
-        "liblog",
-        "libgui",
-        "libutils",
+        "framework-permission-aidl-cpp",
         "libandroid_runtime",
+        "libbinder",
         "libcamera_client",
-        "libstagefright_foundation",
-        "libcutils",
         "libcamera_metadata",
+        "libcutils",
+        "libgui",
+        "liblog",
         "libmediandk",
         "libnativewindow",
+        "libstagefright_foundation",
+        "libutils",
     ],
     header_libs: [
         "jni_headers",
     ],
     cflags: [
-        "-fvisibility=hidden",
         "-DEXPORT=__attribute__((visibility(\"default\")))",
         "-Wall",
-        "-Wextra",
         "-Werror",
+        "-Wextra",
+        "-fvisibility=hidden",
     ],
     // TODO: jchowdhary@, use header_libs instead b/131165718
     include_dirs: [
@@ -117,16 +118,16 @@
     cpp_std: "gnu++17",
     vendor: true,
     srcs: [
+        "NdkCameraCaptureSession.cpp",
+        "NdkCameraDevice.cpp",
+        "NdkCameraManager.cpp",
+        "NdkCameraMetadata.cpp",
+        "NdkCaptureRequest.cpp",
+        "impl/ACameraCaptureSession.cpp",
+        "impl/ACameraMetadata.cpp",
         "ndk_vendor/impl/ACameraDevice.cpp",
         "ndk_vendor/impl/ACameraManager.cpp",
         "ndk_vendor/impl/utils.cpp",
-        "impl/ACameraMetadata.cpp",
-        "impl/ACameraCaptureSession.cpp",
-        "NdkCameraMetadata.cpp",
-        "NdkCameraCaptureSession.cpp",
-        "NdkCameraManager.cpp",
-        "NdkCameraDevice.cpp",
-        "NdkCaptureRequest.cpp",
     ],
 
     export_include_dirs: ["include"],
@@ -135,30 +136,30 @@
     ],
     local_include_dirs: [
         ".",
-        "include",
         "impl",
+        "include",
     ],
     cflags: [
-        "-fvisibility=hidden",
         "-DEXPORT=__attribute__((visibility(\"default\")))",
         "-D__ANDROID_VNDK__",
+        "-fvisibility=hidden",
     ],
 
     shared_libs: [
-        "libbinder_ndk",
-        "libfmq",
-        "libhidlbase",
-        "libhardware",
-        "libnativewindow",
-        "liblog",
-        "libutils",
-        "libstagefright_foundation",
-        "libcutils",
-        "libcamera_metadata",
-        "libmediandk",
         "android.frameworks.cameraservice.common-V1-ndk",
         "android.frameworks.cameraservice.device-V2-ndk",
         "android.frameworks.cameraservice.service-V2-ndk",
+        "libbinder_ndk",
+        "libcamera_metadata",
+        "libcutils",
+        "libfmq",
+        "libhardware",
+        "libhidlbase",
+        "liblog",
+        "libmediandk",
+        "libnativewindow",
+        "libstagefright_foundation",
+        "libutils",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
@@ -175,19 +176,19 @@
     name: "ACameraNdkVendorTest",
     vendor: true,
     srcs: [
-        "ndk_vendor/tests/AImageReaderVendorTest.cpp",
         "ndk_vendor/tests/ACameraManagerTest.cpp",
+        "ndk_vendor/tests/AImageReaderVendorTest.cpp",
     ],
     shared_libs: [
         "libcamera2ndk_vendor",
         "libcamera_metadata",
+        "libcutils",
         "libhidlbase",
+        "liblog",
         "libmediandk",
         "libnativewindow",
-        "libutils",
         "libui",
-        "libcutils",
-        "liblog",
+        "libutils",
     ],
     static_libs: [
         "android.hardware.camera.common@1.0-helper",
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index f36a743..c3bec0a 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -810,9 +810,15 @@
 
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
+
+    AttributionSourceState clientAttribution;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+    clientAttribution.deviceId = mDeviceContext.deviceId;
+
     binder::Status serviceRet = cs->getCameraCharacteristics(cameraIdStr,
             targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-            mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
+            clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
             &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -860,13 +866,20 @@
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = device->getServiceCallback();
     sp<hardware::camera2::ICameraDeviceUser> deviceRemote;
     int targetSdkVersion = android_get_application_target_sdk_version();
+
+    AttributionSourceState clientAttribution;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+    clientAttribution.deviceId = mDeviceContext.deviceId;
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
+
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
     binder::Status serviceRet = cs->connectDevice(
-            callbacks, cameraId, "", {},
-            hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
+            callbacks, cameraId, /*oomScoreOffset*/0,
             targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-            mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
+            clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
             /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index c9d2f1a..7d234bb 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -2287,12 +2287,11 @@
      * <p>When low light boost is enabled by setting the AE mode to
      * 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY', it can dynamically apply a low light
      * boost when the light level threshold is exceeded.</p>
-     * <p>This field is present in the CaptureResult when the AE mode is set to
-     * 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'. Otherwise, the field is not present.</p>
      * <p>This state indicates when low light boost is 'ACTIVE' and applied. Similarly, it can
      * indicate when it is not being applied by returning 'INACTIVE'.</p>
      * <p>This key will be absent from the CaptureResult if AE mode is not set to
      * 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY.</p>
+     * <p>The default value will always be 'INACTIVE'.</p>
      */
     ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE =                     // byte (acamera_metadata_enum_android_control_low_light_boost_state_t)
             ACAMERA_CONTROL_START + 59,
@@ -8311,11 +8310,8 @@
      * <p>If the session configuration is not supported, the AE mode reported in the
      * CaptureResult will be 'ON' instead of 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'.</p>
      * <p>When this AE mode is enabled, the CaptureResult field
-     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE will be present and not null. Otherwise, the
-     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE field will not be present in the CaptureResult.</p>
-     * <p>The application can observe the CaptureResult field
-     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE to determine when low light boost is 'ACTIVE' or
-     * 'INACTIVE'.</p>
+     * ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE will indicate when low light boost is 'ACTIVE'
+     * or 'INACTIVE'. By default ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE will be 'INACTIVE'.</p>
      * <p>The low light boost is 'ACTIVE' once the scene lighting condition is less than the
      * upper bound lux value defined by ACAMERA_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE.
      * This mode will be 'INACTIVE' once the scene lighting condition is greater than the
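The reworded docs state that ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE now defaults to 'INACTIVE' and is simply absent when the AE mode is not ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY. A minimal NDK-side sketch that treats a missing entry the same as INACTIVE (the helper name is illustrative):

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    static bool isLowLightBoostActive(const ACameraMetadata* captureResult) {
        ACameraMetadata_const_entry entry = {};
        camera_status_t status = ACameraMetadata_getConstEntry(
                captureResult, ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE, &entry);
        if (status != ACAMERA_OK || entry.count == 0) {
            return false;  // key absent: AE mode is not the low-light-boost mode
        }
        return entry.data.u8[0] == ACAMERA_CONTROL_LOW_LIGHT_BOOST_STATE_ACTIVE;
    }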
diff --git a/camera/tests/Android.bp b/camera/tests/Android.bp
index 9aaac6a..484335a 100644
--- a/camera/tests/Android.bp
+++ b/camera/tests/Android.bp
@@ -29,6 +29,7 @@
         "CameraCharacteristicsPermission.cpp",
     ],
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "liblog",
         "libutils",
         "libcutils",
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index e5f99be..d21513c 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -33,6 +33,7 @@
 #include <hardware/gralloc.h>
 
 #include <camera/CameraMetadata.h>
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/ICameraServiceListener.h>
 #include <android/hardware/BnCameraServiceListener.h>
@@ -347,7 +348,11 @@
     binder::Status res;
 
     int32_t numCameras = 0;
-    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId,
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.packageName = "meeeeeeeee!";
+    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution,
             /*devicePolicy*/0, &numCameras);
     EXPECT_TRUE(res.isOk()) << res;
     EXPECT_LE(0, numCameras);
@@ -360,7 +365,7 @@
 
     EXPECT_EQ(numCameras, static_cast<const int>(statuses.size()));
     for (const auto &it : statuses) {
-        listener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
+        listener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
     }
 
     for (int32_t i = 0; i < numCameras; i++) {
@@ -379,7 +384,7 @@
         CameraMetadata metadata;
         res = service->getCameraCharacteristics(cameraId,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+                clientAttribution, /*devicePolicy*/0, &metadata);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(metadata.isEmpty());
 
@@ -393,10 +398,10 @@
         // Check connect binder calls
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        res = service->connectDevice(callbacks, cameraId, "meeeeeeeee!",
-                {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+        res = service->connectDevice(callbacks, cameraId,
+                /*oomScoreOffset*/ 0,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, /*out*/&device);
+                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -406,12 +411,12 @@
         if (torchStatus == hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF) {
             // Check torch calls
             res = service->setTorchMode(cameraId,
-                    /*enabled*/true, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
+                    /*enabled*/true, callbacks, clientAttribution, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON, i));
             res = service->setTorchMode(cameraId,
-                    /*enabled*/false, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
+                    /*enabled*/false, callbacks, clientAttribution, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF, i));
@@ -437,10 +442,14 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         {
             SCOPED_TRACE("openNewDevice");
-            binder::Status res = service->connectDevice(callbacks, deviceId, "meeeeeeeee!",
-                    {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+            AttributionSourceState clientAttribution;
+            clientAttribution.deviceId = kDefaultDeviceId;
+            clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+            clientAttribution.packageName = "meeeeeeeee!";
+            binder::Status res = service->connectDevice(callbacks, deviceId,
+                    /*oomScoreOffset*/ 0,
                     /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                    /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+                    /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0,
                     /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
@@ -473,11 +482,13 @@
         serviceListener = new TestCameraServiceListener();
         std::vector<hardware::CameraStatus> statuses;
         service->addListener(serviceListener, &statuses);
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         for (const auto &it : statuses) {
-            serviceListener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
+            serviceListener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
         }
         service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
-                kDefaultDeviceId, /*devicePolicy*/0, &numCameras);
+                clientAttribution, /*devicePolicy*/0, &numCameras);
     }
 
     virtual void TearDown() {
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 10f7f22..9204eb1 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -19,6 +19,7 @@
 
 #include <gtest/gtest.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/ProcessState.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -47,8 +48,10 @@
     sp<IServiceManager> sm = defaultServiceManager();
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+            hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
             &numCameras);
     EXPECT_TRUE(rc.isOk());
 }
@@ -73,9 +76,11 @@
 
         CameraMetadata metadata;
         std::vector<int32_t> tagsNeedingPermission;
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, &metadata);
         ASSERT_TRUE(rc.isOk());
         EXPECT_FALSE(metadata.isEmpty());
         EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 56fcfa4..2740d09 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -19,6 +19,7 @@
 
 #include <gtest/gtest.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/ProcessState.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -84,8 +85,10 @@
     sp<IServiceManager> sm = defaultServiceManager();
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+            hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
             &numCameras);
     EXPECT_TRUE(rc.isOk());
 
@@ -183,9 +186,11 @@
         }
 
         CameraMetadata metadata;
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+                clientAttribution, /*devicePolicy*/0, &metadata);
         if (!rc.isOk()) {
             // The test is relevant only for cameras with Hal 3.x
             // support.
@@ -209,11 +214,12 @@
             continue;
         }
 
+        clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+        clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+        clientAttribution.packageName = "ZSLTest";
         rc = mCameraService->connect(this, cameraId,
-                "ZSLTest", hardware::ICameraService::USE_CALLING_UID,
-                hardware::ICameraService::USE_CALLING_PID,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, kDefaultDeviceId,
+                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, clientAttribution,
                 /*devicePolicy*/0, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index bd97c39..3b6413c 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -31,6 +31,7 @@
     ],
     shared_libs: [
         "camera_platform_flags_c_lib",
+        "framework-permission-aidl-cpp",
         "libbase",
         "libcutils",
         "libutils",
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index b0f59f1..f46d246 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -17,6 +17,7 @@
 #include <Camera.h>
 #include <CameraParameters.h>
 #include <CameraUtils.h>
+#include <android/content/AttributionSourceState.h>
 #include <binder/MemoryDealer.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <gui/Surface.h>
@@ -123,21 +124,24 @@
     sp<ICameraService> cameraService = nullptr;
     cameraService = interface_cast<ICameraService>(binder);
     sp<ICamera> cameraDevice = nullptr;
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     if (mFDP->ConsumeBool()) {
-        cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */, "CAMERAFUZZ",
-                               hardware::ICameraService::USE_CALLING_UID,
-                               hardware::ICameraService::USE_CALLING_PID,
+        clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+        clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+        clientAttribution.packageName = "CAMERAFUZZ";
+        cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
                                /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                                /*overrideToPortrait*/ false, /*forceSlowJpegMode*/ false,
-                               kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
+                               clientAttribution, /*devicePolicy*/0, &cameraDevice);
     } else {
+        clientAttribution.uid = mFDP->ConsumeIntegral<int8_t>();
+        clientAttribution.pid = mFDP->ConsumeIntegral<int8_t>();
+        clientAttribution.packageName = mFDP->ConsumeRandomLengthString(kMaxBytes).c_str();
         cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
-                               mFDP->ConsumeRandomLengthString(kMaxBytes).c_str(),
-                               mFDP->ConsumeIntegral<int8_t>() /* clientUid */,
-                               mFDP->ConsumeIntegral<int8_t>() /* clientPid */,
                                /*targetSdkVersion*/ mFDP->ConsumeIntegral<int32_t>(),
                                /*overrideToPortrait*/ mFDP->ConsumeBool(),
-                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), kDefaultDeviceId,
+                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), clientAttribution,
                                /*devicePolicy*/0, &cameraDevice);
     }
 
@@ -165,13 +169,15 @@
     }
 
     int32_t cameraId = mFDP->ConsumeIntegral<int32_t>();
-    Camera::getNumberOfCameras(kDefaultDeviceId, /*devicePolicy*/0);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    Camera::getNumberOfCameras(clientAttribution, /*devicePolicy*/0);
     CameraInfo cameraInfo;
     cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
                                             : mFDP->ConsumeIntegral<int32_t>();
     cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
                                                  : mFDP->ConsumeIntegral<int32_t>();
-    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, kDefaultDeviceId,
+    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, clientAttribution,
                           /*devicePolicy*/0, &cameraInfo);
     mCamera->reconnect();
 
diff --git a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
index 16ea15e..6e55a16 100644
--- a/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
+++ b/drm/libdrmframework/plugins/forward-lock/FwdLockEngine/Android.bp
@@ -64,10 +64,6 @@
         "libfwdlock-decoder",
     ],
 
-    whole_static_libs: [
-        "libc++fs",
-    ],
-
     local_include_dirs: ["include"],
 
     relative_install_path: "drm",
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index 4c3e25a..c3ae59c 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -60,6 +60,13 @@
 }
 
 flag {
+    name: "music_fx_edge_to_edge"
+    namespace: "media_audio"
+    description: "Enable Edge-to-edge feature for MusicFx and handle insets"
+    bug: "336204940"
+}
+
+flag {
     name: "port_to_piid_simplification"
     namespace: "media_audio"
     description: "PAM only needs for each piid the last portId mapping"
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 94b60e2..39fa187 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -15,6 +15,13 @@
 }
 
 flag {
+    name: "effect_chain_callback_improve"
+    namespace: "media_audio"
+    description: "Improve effect chain callback mutex logic."
+    bug: "342413767"
+}
+
+flag {
     name: "fdtostring_timeout_fix"
     namespace: "media_audio"
     description: "Improve fdtostring implementation to properly handle timing out."
@@ -22,6 +29,14 @@
 }
 
 flag {
+    name: "fix_call_audio_patch"
+    namespace: "media_audio"
+    description:
+        "optimize creation and release of audio patches for call routing"
+    bug: "292492229"
+}
+
+flag {
     name: "fix_concurrent_playback_behavior_with_bit_perfect_client"
     namespace: "media_audio"
     description:
@@ -31,6 +46,15 @@
 }
 
 flag {
+    name: "fix_input_sharing_logic"
+    namespace: "media_audio"
+    description:
+        "Fix the audio policy logic that decides to reuse or close "
+        "input streams when resources are exhausted"
+    bug: "338446410"
+}
+
+flag {
     name: "mutex_priority_inheritance"
     namespace: "media_audio"
     description:
@@ -41,10 +65,9 @@
 }
 
 flag {
-    name: "fix_input_sharing_logic"
+    name: "use_bt_sco_for_media"
     namespace: "media_audio"
     description:
-        "Fix the audio policy logic that decides to reuse or close "
-        "input streams when resources are exhausted"
-    bug: "338446410"
+        "Play media strategy over Bluetooth SCO when active"
+    bug: "292037886"
 }
diff --git a/media/audioaidlconversion/AidlConversionCppNdk.cpp b/media/audioaidlconversion/AidlConversionCppNdk.cpp
index 77418eb..9eaddce 100644
--- a/media/audioaidlconversion/AidlConversionCppNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionCppNdk.cpp
@@ -1069,13 +1069,6 @@
             if (mac.size() != 6) return BAD_VALUE;
             snprintf(addressBuffer, AUDIO_DEVICE_MAX_ADDRESS_LEN, "%02X:%02X:%02X:%02X:%02X:%02X",
                     mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
-            // special case for anonymized mac address:
-            // change anonymized bytes back from FD:FF:FF:FF to XX:XX:XX:XX
-            std::string address(addressBuffer);
-            if (address.compare(0, strlen("FD:FF:FF:FF"), "FD:FF:FF:FF") == 0) {
-                address.replace(0, strlen("FD:FF:FF:FF"), "XX:XX:XX:XX");
-            }
-            strcpy(addressBuffer, address.c_str());
         } break;
         case Tag::ipv4: {
             const std::vector<uint8_t>& ipv4 = aidl.address.get<AudioDeviceAddress::ipv4>();
@@ -1136,20 +1129,11 @@
     if (!legacyAddress.empty()) {
         switch (suggestDeviceAddressTag(aidl.type)) {
             case Tag::mac: {
-                // special case for anonymized mac address:
-                // change anonymized bytes so that they can be scanned as HEX bytes
-                // Use '01' for LSB bits 0 and 1 as Bluetooth MAC addresses are never multicast
-                // and universaly administered
-                std::string address = legacyAddress;
-                if (address.compare(0, strlen("XX:XX:XX:XX"), "XX:XX:XX:XX") == 0) {
-                    address.replace(0, strlen("XX:XX:XX:XX"), "FD:FF:FF:FF");
-                }
-
                 std::vector<uint8_t> mac(6);
-                int status = sscanf(address.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
+                int status = sscanf(legacyAddress.c_str(), "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
                         &mac[0], &mac[1], &mac[2], &mac[3], &mac[4], &mac[5]);
                 if (status != mac.size()) {
-                    ALOGE("%s: malformed MAC address: \"%s\"", __func__, address.c_str());
+                    ALOGE("%s: malformed MAC address: \"%s\"", __func__, legacyAddress.c_str());
                     return unexpected(BAD_VALUE);
                 }
                 aidl.address = AudioDeviceAddress::make<AudioDeviceAddress::mac>(std::move(mac));
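
With the anonymization special case removed from both conversion directions above, the MAC handling is now a plain snprintf/sscanf round trip. A self-contained sketch of that round trip, using an arbitrary example address:

    // Illustrative only; mirrors the remaining conversion path in this file.
    #include <cstdint>
    #include <cstdio>

    int main() {
        const uint8_t mac[6] = {0xF4, 0x65, 0xA6, 0x59, 0x32, 0x9C};
        char buf[18];
        snprintf(buf, sizeof(buf), "%02X:%02X:%02X:%02X:%02X:%02X",
                 mac[0], mac[1], mac[2], mac[3], mac[4], mac[5]);
        uint8_t back[6];
        int n = sscanf(buf, "%hhX:%hhX:%hhX:%hhX:%hhX:%hhX",
                       &back[0], &back[1], &back[2], &back[3], &back[4], &back[5]);
        // The two directions are exact inverses now that the
        // FD:FF:FF:FF <-> XX:XX:XX:XX rewrite no longer sits in between.
        return (n == 6) ? 0 : 1;
    }
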
diff --git a/media/audioaidlconversion/include/media/AidlConversionEffect.h b/media/audioaidlconversion/include/media/AidlConversionEffect.h
index b03d06b..e51bf8b 100644
--- a/media/audioaidlconversion/include/media/AidlConversionEffect.h
+++ b/media/audioaidlconversion/include/media/AidlConversionEffect.h
@@ -72,9 +72,6 @@
                 MAKE_EXTENSION_PARAMETER_ID(_effect, _tag##Tag, _extId);                          \
         aidl::android::hardware::audio::effect::Parameter _aidlParam;                             \
         RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mEffect->getParameter(_id, &_aidlParam))); \
-        aidl::android::hardware::audio::effect::VendorExtension _ext =                            \
-                VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(                              \
-                        _aidlParam, _effect, _tag, _effect::vendor, VendorExtension));            \
         return VALUE_OR_RETURN_STATUS(                                                            \
                 aidl::android::aidl2legacy_Parameter_EffectParameterWriter(_aidlParam, _param));  \
     }
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 06a21f6..aec6523 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -712,6 +712,7 @@
         case kWhatStop: {
             int32_t err = thiz->onStop();
             thiz->mOutputBlockPool.reset();
+            mRunning = false;
             Reply(msg, &err);
             break;
         }
diff --git a/media/codec2/hal/client/GraphicsTracker.cpp b/media/codec2/hal/client/GraphicsTracker.cpp
index dbbabfe..95f5a6e 100644
--- a/media/codec2/hal/client/GraphicsTracker.cpp
+++ b/media/codec2/hal/client/GraphicsTracker.cpp
@@ -664,16 +664,24 @@
 
     int slotId;
     uint64_t outBufferAge;
-    ::android::FrameEventHistoryDelta outTimestamps;
     sp<Fence> fence;
 
     ::android::status_t status = igbp->dequeueBuffer(
-            &slotId, &fence, width, height, format, usage, &outBufferAge, &outTimestamps);
+            &slotId, &fence, width, height, format, usage, &outBufferAge, nullptr);
     if (status < ::android::OK) {
         if (status == ::android::TIMED_OUT || status == ::android::WOULD_BLOCK) {
             ALOGW("BQ might not be ready for dequeueBuffer()");
             return C2_BLOCKING;
         }
+        bool cacheExpired = false;
+        {
+            std::unique_lock<std::mutex> l(mLock);
+            cacheExpired = (mBufferCache.get() != cache.get());
+        }
+        if (cacheExpired) {
+            ALOGW("a new BQ is configured. dequeueBuffer() error %d", (int)status);
+            return C2_BLOCKING;
+        }
         ALOGE("BQ in inconsistent status. dequeueBuffer() error %d", (int)status);
         return C2_CORRUPTED;
     }
diff --git a/media/codec2/hal/common/MultiAccessUnitHelper.cpp b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
index b1fa82f..55bf7a9 100644
--- a/media/codec2/hal/common/MultiAccessUnitHelper.cpp
+++ b/media/codec2/hal/common/MultiAccessUnitHelper.cpp
@@ -497,9 +497,10 @@
                     // This is to take care of the last bytes and to decide to send with
                     // FLAG_INCOMPLETE or not.
                     if ((frame->mWview
-                            && (frame->mWview->offset() > frame->mLargeFrameTuning.thresholdSize))
+                            && (frame->mWview->offset() >= frame->mLargeFrameTuning.thresholdSize))
                             || frame->mComponentFrameIds.empty()) {
                         if (frame->mLargeWork) {
+                            frame->mLargeWork->result = C2_OK;
                             finalizeWork(*frame);
                             addOutWork(frame->mLargeWork);
                             frame->reset();
@@ -558,12 +559,15 @@
         c2_status_t ret = C2_OK;
         if (frame.mLargeWork == nullptr) {
             frame.mLargeWork.reset(new C2Work);
+            frame.mLargeWork->result = C2_OK;
+            frame.mLargeWork->input.flags = (C2FrameData::flags_t)0;
             frame.mLargeWork->input.ordinal = frame.inOrdinal;
             frame.mLargeWork->input.ordinal.frameIndex = frame.inOrdinal.frameIndex;
         }
         if (allocateWorket) {
             if (frame.mLargeWork->worklets.size() == 0) {
                 frame.mLargeWork->worklets.emplace_back(new C2Worklet);
+                frame.mLargeWork->worklets.back()->output.flags = (C2FrameData::flags_t)0;
             }
         }
         if (allocateBuffer) {
@@ -611,6 +615,9 @@
         if (c2ret != C2_OK) {
             return c2ret;
         }
+        uint32_t flags = work->input.flags;
+        flags |= frame.mLargeWork->input.flags;
+        frame.mLargeWork->input.flags = (C2FrameData::flags_t)flags;
         C2FrameData& outputFramedata = frame.mLargeWork->worklets.front()->output;
         if (!(*worklet)->output.configUpdate.empty()) {
             for (auto& configUpdate : (*worklet)->output.configUpdate) {
@@ -678,6 +685,9 @@
                         }
                     }
                     allocateWork(frame, true, true);
+                    uint32_t flags = work->input.flags;
+                    flags |= frame.mLargeWork->input.flags;
+                    frame.mLargeWork->input.flags = (C2FrameData::flags_t)flags;
                     C2ReadView rView = blocks.front().map().get();
                     if (rView.error()) {
                         LOG(ERROR) << "Buffer read view error";
@@ -744,7 +754,8 @@
     }
     LOG(DEBUG) << "Finalizing work with input Idx "
             << frame.mLargeWork->input.ordinal.frameIndex.peekull()
-            << " timestamp " << timeStampUs;
+            << " timestamp " << timeStampUs
+            << " inFlags " << inFlags;
     uint32_t finalFlags = 0;
     if ((!forceComplete)
             && (frame.mLargeWork->result == C2_OK)
diff --git a/media/codec2/hal/hidl/1.0/utils/Component.cpp b/media/codec2/hal/hidl/1.0/utils/Component.cpp
index 0259d90..62f0e25 100644
--- a/media/codec2/hal/hidl/1.0/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.0/utils/Component.cpp
@@ -521,7 +521,18 @@
 
 Return<Status> Component::stop() {
     InputBufferManager::unregisterFrameData(mListener);
-    return static_cast<Status>(mComponent->stop());
+    Status status = static_cast<Status>(mComponent->stop());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->clearDeferredBlocks();
+            }
+        }
+    }
+    return status;
 }
 
 Return<Status> Component::reset() {
diff --git a/media/codec2/hal/hidl/1.1/utils/Component.cpp b/media/codec2/hal/hidl/1.1/utils/Component.cpp
index d34d84e..7f2c4dd 100644
--- a/media/codec2/hal/hidl/1.1/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.1/utils/Component.cpp
@@ -527,7 +527,18 @@
 
 Return<Status> Component::stop() {
     InputBufferManager::unregisterFrameData(mListener);
-    return static_cast<Status>(mComponent->stop());
+    Status status = static_cast<Status>(mComponent->stop());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->clearDeferredBlocks();
+            }
+        }
+    }
+    return status;
 }
 
 Return<Status> Component::reset() {
diff --git a/media/codec2/hal/hidl/1.2/utils/Component.cpp b/media/codec2/hal/hidl/1.2/utils/Component.cpp
index f78e827..7b0aa9b 100644
--- a/media/codec2/hal/hidl/1.2/utils/Component.cpp
+++ b/media/codec2/hal/hidl/1.2/utils/Component.cpp
@@ -523,7 +523,18 @@
 
 Return<Status> Component::stop() {
     InputBufferManager::unregisterFrameData(mListener);
-    return static_cast<Status>(mComponent->stop());
+    Status status = static_cast<Status>(mComponent->stop());
+    {
+        std::lock_guard<std::mutex> lock(mBlockPoolsMutex);
+        for (auto it = mBlockPools.begin(); it != mBlockPools.end(); ++it) {
+            if (it->second->getAllocatorId() == C2PlatformAllocatorStore::BUFFERQUEUE) {
+                std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                        std::static_pointer_cast<C2BufferQueueBlockPool>(it->second);
+                bqPool->clearDeferredBlocks();
+            }
+        }
+    }
+    return status;
 }
 
 Return<Status> Component::reset() {
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index ca0aabb..ae66a58 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -2605,18 +2605,31 @@
             c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_LINEAR, nullptr, &pool);
 
             if (status == C2_OK) {
+                int width, height;
+                config->mInputFormat->findInt32("width", &width);
+                config->mInputFormat->findInt32("height", &height);
+                // The length of the qp-map corresponds to the number of 16x16 blocks in one frame
+                int expectedMapSize = ((width + 15) / 16) * ((height + 15) / 16);
                 size_t mapSize = qpOffsetMap->size();
-                std::shared_ptr<C2LinearBlock> block;
-                status = pool->fetchLinearBlock(mapSize,
-                        C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE}, &block);
-                if (status == C2_OK && !block->map().get().error()) {
-                    C2WriteView wView = block->map().get();
-                    uint8_t* outData = wView.data();
-                    memcpy(outData, qpOffsetMap->data(), mapSize);
-                    C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
-                            kParamIndexQpOffsetMapBuffer,
-                            block->share(0, mapSize, C2Fence()));
-                    mChannel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
+                if (mapSize >= expectedMapSize) {
+                    std::shared_ptr<C2LinearBlock> block;
+                    status = pool->fetchLinearBlock(
+                            expectedMapSize,
+                            C2MemoryUsage{C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE},
+                            &block);
+                    if (status == C2_OK && !block->map().get().error()) {
+                        C2WriteView wView = block->map().get();
+                        uint8_t* outData = wView.data();
+                        memcpy(outData, qpOffsetMap->data(), expectedMapSize);
+                        C2InfoBuffer info = C2InfoBuffer::CreateLinearBuffer(
+                                kParamIndexQpOffsetMapBuffer,
+                                block->share(0, expectedMapSize, C2Fence()));
+                        mChannel->setInfoBuffer(std::make_shared<C2InfoBuffer>(info));
+                    }
+                } else {
+                    ALOGE("Ignoring param key %s as buffer size %d is less than expected "
+                          "buffer size %d",
+                          PARAMETER_KEY_QP_OFFSET_MAP, mapSize, expectedMapSize);
                 }
             }
             params->removeEntryByName(PARAMETER_KEY_QP_OFFSET_MAP);
@@ -2633,6 +2646,15 @@
     if (config->mInputSurface == nullptr
             && (property_get_bool("debug.stagefright.ccodec_delayed_params", false)
                     || comp->getName().find("c2.android.") == 0)) {
+        std::vector<std::unique_ptr<C2Param>> localConfigUpdate;
+        for (const std::unique_ptr<C2Param> &param : configUpdate) {
+            if (param && param->coreIndex().coreIndex() == C2StreamSurfaceScalingInfo::CORE_INDEX) {
+                localConfigUpdate.push_back(C2Param::Copy(*param));
+            }
+        }
+        if (!localConfigUpdate.empty()) {
+            (void)config->setParameters(comp, localConfigUpdate, C2_MAY_BLOCK);
+        }
         mChannel->setParameters(configUpdate);
     } else {
         sp<AMessage> outputFormat = config->mOutputFormat;
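
A quick worked example of the qp-offset map sizing check added above: the map carries one byte per 16x16 block, so for a 1920x1080 input there are ceil(1920/16) = 120 block columns and ceil(1080/16) = 68 block rows, i.e. at least 120 * 68 = 8160 entries are required; shorter client-supplied maps are now ignored with an error log instead of being forwarded.

    // Illustrative computation; mirrors the expectedMapSize formula above.
    constexpr int kWidth = 1920;
    constexpr int kHeight = 1080;
    constexpr int kExpectedMapSize = ((kWidth + 15) / 16) * ((kHeight + 15) / 16);
    static_assert(kExpectedMapSize == 8160, "120 block columns x 68 block rows");
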
diff --git a/media/codec2/sfplugin/CCodecConfig.cpp b/media/codec2/sfplugin/CCodecConfig.cpp
index db59227..36725ec 100644
--- a/media/codec2/sfplugin/CCodecConfig.cpp
+++ b/media/codec2/sfplugin/CCodecConfig.cpp
@@ -1890,6 +1890,9 @@
         if (mDomain == (IS_VIDEO | IS_ENCODER)) {
             AString qpOffsetRects;
             if (params->findString(PARAMETER_KEY_QP_OFFSET_RECTS, &qpOffsetRects)) {
+                int width, height;
+                mInputFormat->findInt32("width", &width);
+                mInputFormat->findInt32("height", &height);
                 std::vector<C2QpOffsetRectStruct> c2QpOffsetRects;
                 char mutableStrQpOffsetRects[strlen(qpOffsetRects.c_str()) + 1];
                 strcpy(mutableStrQpOffsetRects, qpOffsetRects.c_str());
@@ -1899,11 +1902,17 @@
                     if (sscanf(box, "%d,%d-%d,%d=%d", &top, &left, &bottom, &right, &offset) == 5) {
                         left = c2_max(0, left);
                         top = c2_max(0, top);
+                        right = c2_min(right, width);
+                        bottom = c2_min(bottom, height);
                         if (right > left && bottom > top) {
                             C2Rect rect(right - left, bottom - top);
                             rect.at(left, top);
                             c2QpOffsetRects.push_back(C2QpOffsetRectStruct(rect, offset));
+                        } else {
+                            ALOGE("Rects configuration %s is not valid.", box);
                         }
+                    } else {
+                        ALOGE("Rects configuration %s doesn't follow the string pattern.", box);
                     }
                     box = strtok(nullptr, ";");
                 }
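
For reference, each rect parsed above follows the pattern "top,left-bottom,right=offset", with multiple rects separated by ';'; bottom and right are now clamped to the configured input height and width before the rect is forwarded. A hypothetical value for a 1280x720 encode (the literal below is illustrative, not taken from this change):

    // +3 QP offset on the top-left quadrant, -2 on the bottom-right quadrant
    // of a 1280x720 frame; out-of-frame coordinates would be clamped above.
    const char kQpOffsetRects[] = "0,0-360,640=3;360,640-720,1280=-2";
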
diff --git a/media/codec2/tests/Android.bp b/media/codec2/tests/Android.bp
index 02c356c..7d671a7 100644
--- a/media/codec2/tests/Android.bp
+++ b/media/codec2/tests/Android.bp
@@ -45,6 +45,11 @@
         "C2SampleComponent_test.cpp",
         "C2UtilTest.cpp",
         "vndk/C2BufferTest.cpp",
+        "vndk/C2FenceTest.cpp",
+    ],
+
+    static_libs: [
+        "libgmock",
     ],
 
     shared_libs: [
@@ -52,6 +57,7 @@
         "libcodec2_vndk",
         "libcutils",
         "liblog",
+        "libui",
         "libutils",
     ],
 
diff --git a/media/codec2/tests/vndk/C2FenceTest.cpp b/media/codec2/tests/vndk/C2FenceTest.cpp
new file mode 100644
index 0000000..9292381
--- /dev/null
+++ b/media/codec2/tests/vndk/C2FenceTest.cpp
@@ -0,0 +1,455 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <gmock/gmock.h>
+#include <gtest/gtest.h>
+
+#include <C2Buffer.h>
+#include <C2FenceFactory.h>
+
+#include <unistd.h>
+
+#include <android-base/unique_fd.h>
+#include <linux/kcmp.h>       /* Definition of KCMP_* constants */
+#include <sys/mman.h>
+#include <sys/syscall.h>      /* Definition of SYS_* constants */
+#include <ui/Fence.h>
+
+namespace android {
+
+static int fd_kcmp(int fd1, int fd2) {
+    static pid_t pid = getpid();
+
+    return syscall(SYS_kcmp, pid, pid, KCMP_FILE, fd1, fd2);
+}
+
+// matcher to check if value (arg) and fd refers to the same file
+MATCHER_P(RefersToTheSameFile, fd, "") {
+    return fd_kcmp(fd, arg) == 0;
+}
+
+// matcher to check if value (arg) is a dup of an fd
+MATCHER_P(IsDupOf, fd, "") {
+    return (ExplainMatchResult(::testing::Ne(-1), arg, result_listener) &&
+            ExplainMatchResult(::testing::Ne(fd), arg, result_listener) &&
+            ExplainMatchResult(RefersToTheSameFile(fd), arg, result_listener));
+}
+
+class C2FenceTest : public ::testing::Test {
+public:
+    C2FenceTest() = default;
+
+    ~C2FenceTest() = default;
+
+
+protected:
+    enum : int32_t {
+        SYNC_FENCE_DEPRECATED_MAGIC     = 3,
+        SYNC_FENCE_UNORDERED_MAGIC      = '\302fsu',
+        SYNC_FENCE_MAGIC                = '\302fso',
+    };
+
+    // Validate a null fence
+    void validateNullFence(const C2Fence &fence);
+
+    // Validate a single fd sync fence
+    void validateSingleFdFence(const C2Fence &fence, int fd);
+
+    // Validate a two fd unordered sync fence
+    void validateTwoFdUnorderedFence(const C2Fence &fence, int fd1, int fd2, int mergeFd);
+
+    // Validate a three fd sync fence
+    void validateThreeFdFence(const C2Fence &fence, int fd1, int fd2, int fd3);
+};
+
+TEST_F(C2FenceTest, IsDupOf_sanity) {
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    EXPECT_THAT(fd1, ::testing::Not(IsDupOf(fd2)));
+    EXPECT_THAT(-1, ::testing::Not(IsDupOf(fd2)));
+    EXPECT_THAT(-1, ::testing::Not(IsDupOf(-1)));
+    EXPECT_THAT(fd3, ::testing::Not(IsDupOf(fd3)));
+
+    int fd4 = dup(fd3);
+    EXPECT_THAT(fd4, IsDupOf(fd3));
+    EXPECT_THAT(fd3, IsDupOf(fd4));
+
+    close(fd1);
+    close(fd2);
+    close(fd3);
+    close(fd4);
+}
+
+TEST_F(C2FenceTest, NullFence) {
+    validateNullFence(C2Fence());
+}
+
+void C2FenceTest::validateNullFence(const C2Fence &fence) {
+    // Verify that the fence is valid.
+    EXPECT_TRUE(fence.valid());
+    EXPECT_TRUE(fence.ready());
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_EQ(fenceFd.get(), -1);
+    EXPECT_FALSE(fence.isHW()); // perhaps this should be false for a null fence
+
+    // A null fence has no fds
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::IsEmpty());
+    for (int fd : fds) {
+        close(fd);
+    }
+
+    // A null fence has no native handle
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::IsNull());
+    if (handle) {
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, SyncFence_with_negative_fd) {
+    // Create a SyncFence with a negative fd.
+    C2Fence fence = _C2FenceFactory::CreateSyncFence(-1, false /* validate */);
+
+    validateNullFence(fence);
+}
+
+TEST_F(C2FenceTest, SyncFence_with_valid_fd) {
+    // Create a SyncFence with a valid fd. We cannot create an actual sync fd,
+    // so we cannot test wait(), but we can verify the ABI APIs
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateSyncFence(fd, false /* validate */);
+    validateSingleFdFence(fence, fd);
+}
+
+void C2FenceTest::validateSingleFdFence(const C2Fence &fence, int fd) {
+    // EXPECT_TRUE(fence.valid()); // need a valid sync fd to test this
+    // EXPECT_TRUE(fence.ready());
+    // Verify that the fence says it is a HW sync fence.
+    EXPECT_TRUE(fence.isHW()); // FIXME this may be an implementation detail
+
+    // Verify that the fd returned is a duped version of the initial fd
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_THAT(fenceFd.get(), IsDupOf(fd));
+
+    // Verify that fds returns a duped version of the initial fd
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::SizeIs(1));
+    EXPECT_THAT(fds, ::testing::ElementsAre(IsDupOf(fd)));
+    for (int fd_i : fds) {
+        close(fd_i);
+    }
+
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::NotNull());
+    if (handle) {
+        EXPECT_EQ(handle->numFds, 1);
+        EXPECT_EQ(handle->numInts, 1);
+        EXPECT_THAT(handle->data[0], IsDupOf(fd));
+        EXPECT_EQ(handle->data[1], SYNC_FENCE_MAGIC);
+
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_one_valid_test_fd) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { -1, fd, -1 }, &status);
+    // if we only have one valid fd, we are not merging fences, so the test fd is not validated
+    EXPECT_EQ(status, C2_OK);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_one_valid_test_fd_null_status) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { -1, fd, -1 });
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_merge_failure) {
+    // Create a multi SyncFence with multiple non-sync-fence fds. This should
+    // result in a fence created, but also an error.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { fd1, fd2, fd3 }, &status);
+    EXPECT_EQ(status, C2_CORRUPTED);
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_merge_failure_null_status) {
+    // Create a multi SyncFence with multiple non-sync-fence fds. This should
+    // result in a fence created, but also an error.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        { fd1, fd2, fd3 });
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+TEST_F(C2FenceTest, UnorderedMultiSyncFence_with_multiple_fds) {
+    // We cannot create a true unordered multi sync fence as we can only
+    // create test fds and those cannot be merged. As such, we cannot
+    // test the factory method CreateUnorderedMultiSyncFence. We can however
+    // create a test fence from a constructed native handle.
+
+    // Technically, we need 3 fds: if we ended up with only 2, we wouldn't
+    // actually need the 2nd (final fence) fd, since it would be equivalent to
+    // the first. In fact we generate (and have always generated) a single-fd
+    // fence in that case.
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int mergeFd = memfd_create("test3", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(3 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd1;
+    handle->data[1] = fd2;
+    handle->data[2] = mergeFd;
+    handle->data[3] = SYNC_FENCE_UNORDERED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateTwoFdUnorderedFence(fence, fd1, fd2, mergeFd);
+}
+
+void C2FenceTest::validateTwoFdUnorderedFence(
+        const C2Fence &fence, int fd1, int fd2, int mergeFd) {
+    // EXPECT_TRUE(fence.valid()); // need a valid sync fd to test this
+    // EXPECT_TRUE(fence.ready());
+    // Verify that the fence says it is a HW sync fence.
+    EXPECT_TRUE(fence.isHW()); // FIXME this may be an implementation detail
+
+    // Verify that the fd returned is a duped version of the merge fd
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_THAT(fenceFd.get(), IsDupOf(mergeFd));
+
+    // Verify that fds returns a duped versions of the initial fds (but not the merge fd)
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::SizeIs(2));
+    EXPECT_THAT(fds, ::testing::ElementsAre(IsDupOf(fd1), IsDupOf(fd2)));
+    for (int fd_i : fds) {
+        close(fd_i);
+    }
+
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::NotNull());
+    if (handle) {
+        EXPECT_EQ(handle->numFds, 3);
+        EXPECT_EQ(handle->numInts, 1);
+        EXPECT_THAT(handle->data[0], IsDupOf(fd1));
+        EXPECT_THAT(handle->data[1], IsDupOf(fd2));
+        EXPECT_THAT(handle->data[2], IsDupOf(mergeFd));
+        EXPECT_EQ(handle->data[3], SYNC_FENCE_UNORDERED_MAGIC);
+
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, MultiSyncFence_with_one_valid_test_fd) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateMultiSyncFence(
+        { -1, fd, -1 }, &status);
+    // if we only have one valid fd, we are not merging fences, so the test fds are not validated
+    EXPECT_EQ(status, C2_OK);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, MultiSyncFence_with_one_valid_test_fd_null_status) {
+    // Create a multi SyncFence with a single valid fd. This should create
+    // a single fd sync fence. We can only validate this through its public
+    // methods: fd/fds and verify the native handle ABI.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    C2Fence fence = _C2FenceFactory::CreateMultiSyncFence(
+        { -1, fd, -1 });
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, MultiSyncFence_with_multiple_fds) {
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    c2_status_t status = C2_BAD_VALUE;
+    C2Fence fence = _C2FenceFactory::CreateMultiSyncFence(
+        { fd1, fd2, fd3 }, &status);
+    // test fds are not validated
+    EXPECT_EQ(status, C2_OK);
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+void C2FenceTest::validateThreeFdFence(const C2Fence &fence, int fd1, int fd2, int fd3) {
+    // EXPECT_TRUE(fence.valid()); // need a valid sync fd to test this
+    // EXPECT_TRUE(fence.ready());
+    // Verify that the fence says it is a HW sync fence.
+    EXPECT_TRUE(fence.isHW()); // FIXME this may be an implementation detail
+
+    // Verify that the fd returned is a duped version of the final fd
+    base::unique_fd fenceFd{fence.fd()};
+    EXPECT_THAT(fenceFd.get(), IsDupOf(fd3));
+
+    // Verify that fds returns a duped versions of all 3 initial fds
+    std::vector<int> fds = ExtractFdsFromCodec2SyncFence(fence);
+    EXPECT_THAT(fds, ::testing::SizeIs(3));
+    EXPECT_THAT(fds, ::testing::ElementsAre(IsDupOf(fd1), IsDupOf(fd2), IsDupOf(fd3)));
+    for (int fd_i : fds) {
+        close(fd_i);
+    }
+
+    native_handle_t *handle = _C2FenceFactory::CreateNativeHandle(fence);
+    EXPECT_THAT(handle, ::testing::NotNull());
+    if (handle) {
+        EXPECT_EQ(handle->numFds, 3);
+        EXPECT_EQ(handle->numInts, 1);
+        EXPECT_THAT(handle->data[0], IsDupOf(fd1));
+        EXPECT_THAT(handle->data[1], IsDupOf(fd2));
+        EXPECT_THAT(handle->data[2], IsDupOf(fd3));
+        EXPECT_EQ(handle->data[3], SYNC_FENCE_MAGIC);
+
+        native_handle_close(handle);
+        native_handle_delete(handle);
+    }
+}
+
+TEST_F(C2FenceTest, BackwardCompat_UDC_sync_fence) {
+    // Create a single SyncFence from a UDC native handle
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(1 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd;
+    handle->data[1] = SYNC_FENCE_DEPRECATED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, BackwardCompat_24Q1_single_fd_fence) {
+    // Create a single SyncFence from a 24Q1 native handle
+    // This had the same (albeit separately duped) fd twice, and used the legacy
+    // magic number.
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(2 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd;
+    handle->data[1] = dup(fd);
+    handle->data[2] = SYNC_FENCE_DEPRECATED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, BackwardCompat_24Q3_single_fd_fence) {
+    // Create a single SyncFence from the defined native handle
+
+    int fd = memfd_create("test", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(1 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd;
+    handle->data[1] = SYNC_FENCE_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateSingleFdFence(fence, fd);
+}
+
+TEST_F(C2FenceTest, BackwardCompat_24Q1_multi_fd_fence) {
+    // Create a single SyncFence from a 24Q1 era native handle with
+    // the legacy magic number.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int mergeFd = memfd_create("test3", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(3 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd1;
+    handle->data[1] = fd2;
+    handle->data[2] = mergeFd;
+    handle->data[3] = SYNC_FENCE_DEPRECATED_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateTwoFdUnorderedFence(fence, fd1, fd2, mergeFd);
+}
+
+// No need to create BackwardCompat_24Q3_unordered_multi_fd_fence because
+// we are creating that fence already from the 24Q3 native handle layout
+// in the UnorderedMultiSyncFence_with_multiple_fds test.
+
+TEST_F(C2FenceTest, BackwardCompat_24Q3_multi_fd_fence) {
+    // Create a SyncFence from a 24Q3 era native handle with
+    // the current magic number.
+
+    int fd1 = memfd_create("test1", 0 /* flags */);
+    int fd2 = memfd_create("test2", 0 /* flags */);
+    int fd3 = memfd_create("test3", 0 /* flags */);
+
+    native_handle_t *handle = native_handle_create(3 /* numfds */, 1 /* numints */);
+    handle->data[0] = fd1;
+    handle->data[1] = fd2;
+    handle->data[2] = fd3;
+    handle->data[3] = SYNC_FENCE_MAGIC;
+    C2Fence fence = _C2FenceFactory::CreateFromNativeHandle(handle, true /* takeOwnership */);
+    native_handle_delete(handle);
+
+    validateThreeFdFence(fence, fd1, fd2, fd3);
+}
+
+} // namespace android
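
An aside on the magic constants exercised by these tests and defined in C2Fence.cpp below: the multi-character literals pack four bytes into a single int32_t. The hex values in the sketch are an assumption based on the usual Clang/GCC left-to-right packing of multi-character literals (which is compiler-defined behavior):

    #include <cstdint>
    // Sketch only; values assume the usual Clang/GCC multi-char literal packing.
    constexpr int32_t kSyncFenceMagic           = '\302fso';  // 0xC2 'f' 's' 'o' -> 0xC266736F
    constexpr int32_t kSyncFenceUnorderedMagic  = '\302fsu';  // 0xC2 'f' 's' 'u' -> 0xC2667375
    constexpr int32_t kSyncFenceDeprecatedMagic = 3;          // pre-24Q3 handles
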
diff --git a/media/codec2/vndk/C2Fence.cpp b/media/codec2/vndk/C2Fence.cpp
index 3438406..d28f926 100644
--- a/media/codec2/vndk/C2Fence.cpp
+++ b/media/codec2/vndk/C2Fence.cpp
@@ -28,16 +28,27 @@
 
 #include <utility>
 
-#define MAX_FENCE_FDS 1
+// support up to 32 sync fds (and an optional merged fd), and 1 int
+#define MAX_FENCE_FDS  33
+#define MAX_FENCE_INTS 1
 
 class C2Fence::Impl {
 public:
-    enum type_t : uint32_t {
-        INVALID_FENCE,
-        NULL_FENCE,
-        SURFACE_FENCE,
-        SYNC_FENCE,
-        PIPE_FENCE,
+    // These enums are not part of the ABI, so can be changed.
+    enum type_t : int32_t {
+        INVALID_FENCE     = -1,
+        NULL_FENCE        = 0,
+        SURFACE_FENCE     = 2,
+
+        SYNC_FENCE        = 3,
+        PIPE_FENCE        = 4,
+    };
+
+    // magic numbers for native handles
+    enum : int32_t {
+        SYNC_FENCE_DEPRECATED_MAGIC     = 3,
+        SYNC_FENCE_UNORDERED_MAGIC      = '\302fsu',
+        SYNC_FENCE_MAGIC                = '\302fso',
     };
 
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) = 0;
@@ -54,7 +65,8 @@
 
     /**
      * Create a native handle for the fence so it can be marshalled.
-     * The native handle must store fence type in the first integer.
+     * All native handles must store fence type in the last integer.
+     * The created native handle (if not null) must be closed by the caller.
      *
      * \return a valid native handle if the fence can be marshalled, otherwise return null.
      */
@@ -64,11 +76,29 @@
 
     Impl() = default;
 
+    /**
+     * Get the type of the fence from the native handle.
+     *
+     * \param nh the native handle to get the type from.
+     * \return the type of the fence, or INVALID_FENCE if the native handle is
+     * invalid or malformed.
+     */
     static type_t GetTypeFromNativeHandle(const native_handle_t* nh) {
-        if (nh && nh->numFds >= 0 && nh->numFds <= MAX_FENCE_FDS && nh->numInts > 0) {
-            return static_cast<type_t>(nh->data[nh->numFds]);
+        if (!nh || nh->numFds < 0 || nh->numFds > MAX_FENCE_FDS
+                || nh->numInts < 1 || nh->numInts > MAX_FENCE_INTS) {
+            return INVALID_FENCE;
         }
-        return INVALID_FENCE;
+
+        // the magic number for Codec 2.0 native handles is the last integer
+        switch (nh->data[nh->numFds + nh->numInts - 1]) {
+            case SYNC_FENCE_MAGIC:
+            case SYNC_FENCE_UNORDERED_MAGIC:
+            case SYNC_FENCE_DEPRECATED_MAGIC:
+                return SYNC_FENCE;
+
+            default:
+                return INVALID_FENCE;
+        }
     }
 };
 
@@ -189,6 +219,53 @@
 
 using namespace android;
 
+/**
+ * Implementation for a sync fence.
+ *
+ * A sync fence is fundamentally a fence that is created from an android sync
+ * fd (which represents a HW fence).
+ *
+ * The native handle layout for a single sync fence is:
+ *   fd[0]  - sync fd
+ *   int[0]  - magic (SYNC_FENCE_MAGIC (='\302fso'))
+ *
+ * Note: Between Android T and 24Q3, the magic number was erroneously
+ * SYNC_FENCE (=3).
+ *
+ * Multi(ple) Sync Fences
+ *
+ * Since Android 24Q3, this implementation also supports a sequence of
+ * sync fences. When this is the case, there is an expectation that the last
+ * sync fence being ready will guarantee that all other sync fences are
+ * also ready. (This guarantees backward compatibility to a single fd sync fence,
+ * and mFence will be that final fence.)
+ *
+ * It is furthermore recommended that the fences be in order - either by
+ * expected signaling time, or by the order in which they need to be ready. The
+ * specific ordering is not specified or enforced, but it could be an
+ * implementation requirement of the specific use case in the future.
+ *
+ * This implementation also supports an unordered set of sync fences. In this
+ * case, it will merge all the fences into a single merged fence, which will
+ * be the backward compatible singular fence (stored in mFence).
+ *
+ * The native handle layout for an unordered multi-fence sync fence (from Android
+ * 24Q3) is:
+ *
+ *   fd[0]   - sync fd 1
+ *   ...
+ *   fd[n-1] - sync fd N
+ *   fd[n]   - merged fence fd
+ *   int[0]  - magic (SYNC_FENCE_UNORDERED_MAGIC (='\302fsu'))
+ *
+ * The native handle layout for an ordered multi-fence sync fence (from Android
+ * 24Q3) is:
+ *
+ *   fd[0]   - sync fd 1
+ *   ...
+ *   fd[n-1] - sync fd N
+ *   int[0]  - magic (SYNC_FENCE_MAGIC (='\302fso'))
+ */
 class _C2FenceFactory::SyncFenceImpl : public C2Fence::Impl {
 public:
     virtual c2_status_t wait(c2_nsecs_t timeoutNs) {
@@ -218,11 +295,19 @@
         return mFence->dup();
     }
 
+    /**
+     * Returns a duped list of fds used when creating this fence. It will
+     * not return the internally created merged fence fd.
+     */
     std::vector<int> fds() const {
         std::vector<int> retFds;
         for (int index = 0; index < mListFences.size(); index++) {
             retFds.push_back(mListFences[index]->dup());
         }
+        // ensure that at least one fd is returned
+        if (mListFences.empty()) {
+            retFds.push_back(mFence->dup());
+        }
         return retFds;
     }
 
@@ -236,7 +321,18 @@
 
     virtual native_handle_t *createNativeHandle() const {
         std::vector<int> nativeFds = fds();
-        nativeFds.push_back(fd());
+        int32_t magic = SYNC_FENCE_MAGIC;
+
+        // Also parcel the singular fence if it is not already part of the list.
+        // If this was a single-fd fence, mListFences will be empty, but fds()
+        // already returned a list with that single fd.
+        if (!mListFences.empty() && mListFences.back() != mFence) {
+            nativeFds.push_back(fd());
+            if (!mListFences.empty()) {
+                magic = SYNC_FENCE_UNORDERED_MAGIC;
+            }
+        }
+
         native_handle_t* nh = native_handle_create(nativeFds.size(), 1);
         if (!nh) {
             ALOGE("Failed to allocate native handle for sync fence");
@@ -249,71 +345,122 @@
         for (int i = 0; i < nativeFds.size(); i++) {
             nh->data[i] = nativeFds[i];
         }
-        nh->data[nativeFds.size()] = type();
+        nh->data[nativeFds.size()] = magic;
         return nh;
     }
 
     virtual ~SyncFenceImpl() {};
 
+    /**
+     * Constructs a SyncFenceImpl from a single sync fd. No error checking is
+     * performed on the fd here as we cannot make this a null fence.
+     *
+     * \param fenceFd the fence fd to create the SyncFenceImpl from.
+     */
     SyncFenceImpl(int fenceFd) :
         mFence(sp<Fence>::make(fenceFd)) {
-        mListFences.clear();
-        if (mFence) {
-            mListFences.push_back(mFence);
-        }
     }
 
-    SyncFenceImpl(const std::vector<int>& fenceFds, int mergedFd) {
-        mListFences.clear();
-
-        for (int fenceFd : fenceFds) {
-            if (fenceFd < 0) {
-                continue;
-            } else {
-                mListFences.push_back(sp<Fence>::make(fenceFd));
-                if (!mListFences.back()) {
-                    mFence.clear();
-                    break;
-                }
-                if (mergedFd == -1) {
-                    mFence = (mFence == nullptr) ? (mListFences.back()) :
-                        (Fence::merge("syncFence", mFence, mListFences.back()));
-                }
-            }
-        }
-        if (mergedFd != -1)
-        {
-            mFence = sp<Fence>::make(mergedFd);
-        }
-        if (!mFence) {
-            mListFences.clear();
-        }
+    SyncFenceImpl(const sp<Fence> &fence) :
+        mFence(fence) {
     }
 
-    static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(const native_handle_t* nh) {
-        if (!nh || nh->numFds < 1 || nh->numInts < 1) {
-            ALOGE("Invalid handle for sync fence");
+    /**
+     * Constructs a SyncFenceImpl from a list of sync fds.
+     *
+     * \param fenceFds the list of fence fds to create the SyncFenceImpl from.
+     * \param finalFence the singular fence for this multi-fd fence. This can
+     * be either the last fence in fences or a separate (merged) fence.
+     */
+    SyncFenceImpl(const std::vector<sp<Fence>>& fences, const sp<Fence> &finalFence) :
+        mListFences(fences),
+        mFence(finalFence) {
+    }
+
+    /**
+     * Creates a SyncFenceImpl from a native handle.
+     *
+     * \param nh the native handle to create the SyncFenceImpl from.
+     * \param takeOwnership if true, the SyncFenceImpl will take ownership of the
+     *                      file descriptors in the native handle. Otherwise,
+     *                      the SyncFenceImpl will dup the file descriptors.
+     *
+     * \return a shared_ptr to the SyncFenceImpl, or nullptr if the native
+     * handle is invalid or malformed.
+    */
+    static std::shared_ptr<SyncFenceImpl> CreateFromNativeHandle(
+            const native_handle_t* nh, bool takeOwnership) {
+        // we should only call this method if _C2FenceFactory::GetTypeFromNativeHandle
+        // returned SYNC_FENCE, but do these checks anyway to avoid overflows
+        // in case that does not happen.
+        if (!nh) {
+            ALOGE("Invalid handle for a sync fence (nullptr)");
+            return nullptr;
+        } else if (nh->numFds < 1 || nh->numInts < 1
+                || nh->numFds > MAX_FENCE_FDS || nh->numInts > MAX_FENCE_INTS) {
+            ALOGE("Invalid handle for a sync fence (%d fds, %d ints)", nh->numFds, nh->numInts);
             return nullptr;
         }
-        std::vector<int> fds;
-        for (int i = 0; i < nh->numFds-1; i++) {
-            fds.push_back(dup(nh->data[i]));
-        }
-        std::shared_ptr<SyncFenceImpl> p = (nh->numFds == 1)?
-                (std::make_shared<SyncFenceImpl>(fds.back())):
-                (std::make_shared<SyncFenceImpl>(fds, (dup(nh->data[nh->numFds-1]))));
-        if (!p) {
-            ALOGE("Failed to allocate sync fence impl");
-            for (int fd : fds) {
-                close(fd);
+        std::vector<sp<Fence>> fences;
+        for (int i = 0; i < nh->numFds; i++) {
+            int fd = nh->data[i];
+            if (!takeOwnership && fd >= 0) {
+                fd = dup(fd);
+            }
+            if (fd >= 0) {
+                sp<Fence> fence = sp<Fence>::make(fd);
+                if (fence) {
+                    fences.push_back(fence);
+                } else {
+                    ALOGW("Failed to create fence from fd %d", fd);
+                }
             }
         }
+
+        std::shared_ptr<SyncFenceImpl> p;
+        if (fences.size() == 0) {
+            ALOGE("No valid fences found in handle for a sync fence");
+            return nullptr;
+        } else if (fences.size() == 1) {
+            p = std::make_shared<SyncFenceImpl>(fences[0]);
+        } else {
+            int32_t magic = nh->data[nh->numFds + nh->numInts - 1];
+            if (magic != SYNC_FENCE_MAGIC) {
+                // The last fence is the merged fence. Separate it.
+                sp<Fence> finalFence = fences.back();
+                fences.pop_back();
+
+                // Special case: if we end up with only a single element list
+                // with another merged fence, that merged fence must be the
+                // same fence. This happened in an early version of multi fd
+                // support for single-fd sync fences.
+                if (fences.size() == 1) {
+                    // For single-fd fence the sp-s must be equal
+                    finalFence = fences.back();
+                }
+                p = std::make_shared<SyncFenceImpl>(fences, finalFence);
+            } else {
+                // Use the last fence as the standalone fence.
+                p = std::make_shared<SyncFenceImpl>(fences, fences.back());
+            }
+        }
+
+        ALOGE_IF(!p, "Failed to allocate sync fence impl");
         return p;
     }
 
 private:
+    /**
+     * The list of fences in case of a multi-fence sync fence. Otherwise, this
+     * list is empty.
+     */
     std::vector<sp<Fence>> mListFences;
-    sp<Fence> mFence;  //merged fence in case mListFences size > 0
+
+    /**
+     * The singular fence for this sync fence. For multi-fence sync fences,
+     * this could be a merged fence, or simply the final fence.
+     */
+    sp<Fence> mFence;
 };
 
 std::vector<int> ExtractFdsFromCodec2SyncFence(const C2Fence& fence) {
@@ -324,39 +471,155 @@
     return retFds;
 }
 
-C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd) {
+C2Fence _C2FenceFactory::CreateSyncFence(int fenceFd, bool validate) {
     std::shared_ptr<C2Fence::Impl> p;
     if (fenceFd >= 0) {
         p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFd);
         if (!p) {
             ALOGE("Failed to allocate sync fence impl");
             close(fenceFd);
-        } else if (!p->valid()) {
+        } else if (validate && (!p->valid() || p->ready())) {
+            // don't create a fence object if the sync fd has already signaled or is invalid
             p.reset();
         }
     } else {
-        ALOGV("Create sync fence from invalid fd");
-        return C2Fence();
+        ALOGV("Won't create sync fence from invalid fd");
     }
     return C2Fence(p);
 }
 
-C2Fence _C2FenceFactory::CreateMultipleFdSyncFence(const std::vector<int>& fenceFds) {
-    std::shared_ptr<C2Fence::Impl> p;
-    if (fenceFds.size() > 0) {
-        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fenceFds, -1);
-        if (!p) {
-            ALOGE("Failed to allocate sync fence impl closing FDs");
-            for (int fenceFd : fenceFds) {
-                close(fenceFd);
-            }
-        } else if (!p->valid()) {
-            ALOGE("Invalid sync fence created");
-            p.reset();
-        }
-    } else {
-        ALOGE("Create sync fence from invalid fd list of size 0");
+C2Fence _C2FenceFactory::CreateUnorderedMultiSyncFence(
+        const std::vector<int>& fenceFds, c2_status_t *status) {
+    if (status) {
+        *status = C2_OK;
     }
+
+    sp<Fence> finalFence;
+    std::vector<sp<Fence>> fences;
+
+    bool mergeFailed = false;
+    for (int fenceFd : fenceFds) {
+        if (fenceFd < 0) {
+            // ignore invalid fences
+            continue;
+        }
+        sp<Fence> fence = sp<Fence>::make(fenceFd);
+
+        // If we could not create an sp, further sp-s will also fail.
+        if (fence == nullptr) {
+            if (status) {
+                *status = C2_NO_MEMORY;
+            }
+            break;
+        }
+        fences.push_back(fence);
+
+        if (finalFence == nullptr) {
+            finalFence = fence;
+        } else {
+            sp<Fence> mergedFence = Fence::merge("syncFence", finalFence, fence);
+            if (mergedFence == nullptr || mergedFence == Fence::NO_FENCE) {
+                ALOGE_IF(!mergeFailed, "Could not merge fences for sync fence.");
+                mergeFailed = true;
+                if (status) {
+                    *status = (mergedFence == nullptr) ? C2_NO_MEMORY : C2_CORRUPTED;
+                }
+
+                if (mergedFence == nullptr) {
+                    break;
+                }
+                // If we cannot merge one of the fences, the best course of action
+                // is to keep going, as the alternative would be to clear all fences
+                // (making this a null fence, which would always be ready).
+            } else {
+                finalFence = mergedFence;
+            }
+        }
+    }
+
+    // we may have ended up with a single or no fence due to merging failures or
+    // invalid fds.
+    if (fences.size() == 0) {
+        // we have no fds, we have a null fence.
+        return C2Fence();
+    }
+
+    std::shared_ptr<C2Fence::Impl> p;
+
+    if (fences.size() == 1) {
+        // We have a single sync fd. We don't need the merged fence, which is
+        // already simply that sole fence.
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(finalFence);
+    } else {
+        // if we couldn't merge any fences just use the last one
+        if (finalFence == fences[0]) {
+            finalFence = fences.back();
+        }
+
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fences, finalFence);
+    }
+
+    if (!p) {
+        ALOGE("Failed to allocate sync fence impl closing FDs");
+        // all fds were moved into Fence objects which will close them.
+        if (status) {
+            *status = C2_NO_MEMORY;
+        }
+        return C2Fence();
+    }
+
+    return C2Fence(p);
+}
+
+C2Fence _C2FenceFactory::CreateMultiSyncFence(
+        const std::vector<int>& fenceFds, c2_status_t *status) {
+    if (status) {
+        *status = C2_OK;
+    }
+
+    std::vector<sp<Fence>> fences;
+
+    for (int fenceFd : fenceFds) {
+        if (fenceFd < 0) {
+            // ignore invalid fences
+            continue;
+        }
+        sp<Fence> fence = sp<Fence>::make(fenceFd);
+
+        // If we could not create an sp, keep going with the existing fences.
+        if (fence == nullptr) {
+            if (status) {
+                *status = C2_NO_MEMORY;
+            }
+            break;
+        }
+        fences.push_back(fence);
+    }
+
+    // we may have ended up with a single or no fence due to invalid fds.
+    if (fences.size() == 0) {
+        // we have no fds, we have a null fence.
+        return C2Fence();
+    }
+
+    std::shared_ptr<C2Fence::Impl> p;
+
+    if (fences.size() == 1) {
+        // We have a single sync fd, this is a simple sync fence.
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fences[0]);
+    } else {
+        p = std::make_shared<_C2FenceFactory::SyncFenceImpl>(fences, fences.back());
+    }
+
+    if (!p) {
+        ALOGE("Failed to allocate sync fence impl closing FDs");
+        // all fds were moved into Fence objects which will close them.
+        if (status) {
+            *status = C2_NO_MEMORY;
+        }
+        return C2Fence();
+    }
+
     return C2Fence(p);
 }
 
@@ -521,7 +784,8 @@
     return fence.mImpl? fence.mImpl->createNativeHandle() : nullptr;
 }
 
-C2Fence _C2FenceFactory::CreateFromNativeHandle(const native_handle_t* handle) {
+C2Fence _C2FenceFactory::CreateFromNativeHandle(
+        const native_handle_t* handle, bool takeOwnership) {
     if (!handle) {
         return C2Fence();
     }
@@ -529,11 +793,14 @@
     std::shared_ptr<C2Fence::Impl> p;
     switch (type) {
         case C2Fence::Impl::SYNC_FENCE:
-            p = SyncFenceImpl::CreateFromNativeHandle(handle);
+            p = SyncFenceImpl::CreateFromNativeHandle(handle, takeOwnership);
             break;
         default:
             ALOGV("Unsupported fence type %d", type);
-            // Nothing else to do. The handle is owned by the caller.
+            // Still close the handle here if taking ownership.
+            if (takeOwnership) {
+                (void) native_handle_close(handle);
+            }
             // return a null-fence in this case
             break;
     }
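
Taken together, the reworked factory surface can be exercised roughly as follows. This is a minimal usage sketch rather than part of the change; it assumes the caller already owns real sync fds (ownership of the fds passes to the factories), and it only shows the ordered multi-fd path plus the native-handle round trip.

    // Sketch only; fd parameters stand in for real sync fds.
    #include <C2Buffer.h>
    #include <C2FenceFactory.h>
    #include <cutils/native_handle.h>

    C2Fence marshalRoundTrip(int fd1, int fd2, int fd3) {
        c2_status_t status = C2_OK;

        // Ordered list: the last fd is expected to signal last and becomes the
        // singular fence backing the C2Fence.
        C2Fence ordered = _C2FenceFactory::CreateMultiSyncFence({fd1, fd2, fd3}, &status);

        // Marshalling: the caller owns (and must close/delete) the created handle;
        // with takeOwnership=false the restored fence dups the handle's fds.
        native_handle_t *nh = _C2FenceFactory::CreateNativeHandle(ordered);
        C2Fence restored = _C2FenceFactory::CreateFromNativeHandle(nh, false /* takeOwnership */);
        if (nh) {
            native_handle_close(nh);
            native_handle_delete(nh);
        }
        return restored;
    }

CreateSyncFence(fd, validate) and CreateUnorderedMultiSyncFence(fds, &status) follow the same ownership rules; the unordered variant additionally merges the fds into an internal merged fence, as described in the SyncFenceImpl comment above.
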
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index e7fd14f..0987da2 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -478,13 +478,25 @@
 
 class _C2BlockPoolCache {
 public:
-    _C2BlockPoolCache() : mBlockPoolSeqId(C2BlockPool::PLATFORM_START + 1) {}
+    _C2BlockPoolCache() : mBlockPoolSeqId(C2BlockPool::PLATFORM_START + 1) {
+        mBqPoolDeferDeallocAfterStop = false;
+#ifdef __ANDROID_APEX__
+        bool stopHalBeforeSurface = ::android::base::GetBoolProperty(
+                "debug.codec2.stop_hal_before_surface", false);
+        if (!stopHalBeforeSurface) {
+            mBqPoolDeferDeallocAfterStop =
+                    ::android::base::GetIntProperty(
+                            "debug.codec2.bqpool_dealloc_after_stop", 0) != 0;
+        }
+#endif
+    }
 
 private:
     c2_status_t _createBlockPool(
             C2PlatformAllocatorDesc &allocatorParam,
             std::vector<std::shared_ptr<const C2Component>> components,
             C2BlockPool::local_id_t poolId,
+            bool deferDeallocAfterStop,
             std::shared_ptr<C2BlockPool> *pool) {
         std::shared_ptr<C2AllocatorStore> allocatorStore =
                 GetCodec2PlatformAllocatorStore();
@@ -548,6 +560,11 @@
                 if (res == C2_OK) {
                     std::shared_ptr<C2BlockPool> ptr(
                             new C2BufferQueueBlockPool(allocator, poolId), deleter);
+                    if (deferDeallocAfterStop) {
+                        std::shared_ptr<C2BufferQueueBlockPool> bqPool =
+                            std::static_pointer_cast<C2BufferQueueBlockPool>(ptr);
+                        bqPool->setDeferDeallocationAfterStop();
+                    }
                     *pool = ptr;
                     mBlockPools[poolId] = ptr;
                     mComponents[poolId].insert(
@@ -603,7 +620,8 @@
             std::vector<std::shared_ptr<const C2Component>> components,
             std::shared_ptr<C2BlockPool> *pool) {
         std::unique_lock lock(mMutex);
-        return _createBlockPool(allocator, components, mBlockPoolSeqId++, pool);
+        return _createBlockPool(allocator, components, mBlockPoolSeqId++,
+                                mBqPoolDeferDeallocAfterStop, pool);
     }
 
 
@@ -638,7 +656,7 @@
             C2PlatformAllocatorDesc allocator;
             allocator.allocatorId = C2PlatformAllocatorStore::BUFFERQUEUE;
             return _createBlockPool(
-                    allocator, {component}, blockPoolId, pool);
+                    allocator, {component}, blockPoolId, mBqPoolDeferDeallocAfterStop, pool);
         }
         return C2_NOT_FOUND;
     }
@@ -651,6 +669,8 @@
 
     std::map<C2BlockPool::local_id_t, std::weak_ptr<C2BlockPool>> mBlockPools;
     std::map<C2BlockPool::local_id_t, std::vector<std::weak_ptr<const C2Component>>> mComponents;
+
+    bool mBqPoolDeferDeallocAfterStop;
 };
 
 static std::unique_ptr<_C2BlockPoolCache> sBlockPoolCache =
diff --git a/media/codec2/vndk/include/C2BqBufferPriv.h b/media/codec2/vndk/include/C2BqBufferPriv.h
index 320b192..806932c 100644
--- a/media/codec2/vndk/include/C2BqBufferPriv.h
+++ b/media/codec2/vndk/include/C2BqBufferPriv.h
@@ -28,6 +28,94 @@
 class GraphicBuffer;
 }  // namespace android
 
+/**
+ * BufferQueue based BlockPool.
+ *
+ * This creates graphic blocks from BufferQueue. BufferQueue here is HIDL-ized IGBP.
+ * HIDL-ized IGBP enables vendor HAL to call IGBP interfaces via HIDL over process boundary.
+ * HIDL-ized IGBP is called as HGBP. HGBP had been used from multiple places in android,
+ * but now this is the only place HGBP is still used.
+ *
+ * Initially no HGBP is configured, in which case graphic blocks are allocated
+ * from gralloc directly upon \fetchGraphicBlock() requests.
+ *
+ * HGBP can also be configured as null, in which case graphic blocks are likewise
+ * allocated from gralloc directly upon \fetchGraphicBlock() requests.
+ *
+ * If a specific HGBP is configured, the HGBP acts as an allocator for creating graphic blocks.
+ *
+ *
+ * HGBP/IGBP and the BlockPool
+ *
+ * GraphicBuffer(s) from a BufferQueue (IGBP/IGBC) are identified by slot id.
+ * A created GraphicBuffer occupies a slot (so the GraphicBuffer has a slot-id).
+ * A GraphicBuffer is produced, consumed, and recycled based on its slot-id
+ * w.r.t. the BufferQueue.
+ *
+ * HGBP::dequeueBuffer() returns a slot id where the slot has an available GraphicBuffer.
+ * If necessary, HGBP allocates a new GraphicBuffer for the slot and indicates
+ * via a return flag that a new buffer was allocated.
+ * To retrieve the GraphicBuffer, HGBP::requestBuffer() must be called with the
+ * slot id. To save on HGBP remote calls, the blockpool caches the
+ * allocated GraphicBuffer(s) along with the slot information.
+ *
+ * The blockpool provides a C2GraphicBlock upon \fetchGraphicBlock().
+ * The C2GraphicBlock has a native handle, which is extracted from a GraphicBuffer
+ * and then cloned so that its life-cycle is independent of the GraphicBuffer. The GraphicBuffer
+ * is allocated by HGBP::dequeueBuffer() and retrieved by HGBP::requestBuffer()
+ * if there is an HGBP configured.
+ *
+ *
+ * Life-cycle of C2GraphicBlock
+ *
+ * The decoder HAL writes a decoded frame into C2GraphicBlock. Upon
+ * completion, the component sends the block to the client in the remote process
+ * (i.e. to MediaCodec). The remote process renders the frame into the output surface
+ * via IGBP::queueBuffer() (Note: this is not hidlized.).
+ *
+ * If the decoder HAL destroys the C2GraphicBlock without transferring it to the
+ * client, the destroy request goes to the BlockPool. The BlockPool then
+ * returns the associated GraphicBuffer's slot to HGBP for recycling via
+ * HGBP::cancelBuffer().
+ *
+ *
+ * Clearing the Cache (GraphicBuffer)
+ *
+ * When the output surface is switched to a new surface, the GraphicBuffers from
+ * the old surface are either migrated or cleared.
+ *
+ * The GraphicBuffer(s) still in use are migrated to a new surface during
+ * configuration via HGBP::attachBuffer(). The GraphicBuffer(s) not in use are
+ * cleared from the cache inside the BlockPool.
+ *
+ * When the surface is switched to a null surface, all the
+ * GraphicBuffers in the cache are cleared.
+ *
+ *
+ * Workaround w.r.t. b/322731059 (Deferring cleaning the cache)
+ *
+ * Some vendor devices have issues with graphic buffer lifecycle management,
+ * where the graphic buffers get released even when the cloned native handles
+ * in the remote process are not closed yet. This issue led to rare crashes
+ * for those devices when the cache is cleared early.
+ *
+ * We worked around the crash by deferring the clearing of the cache.
+ * The workaround is not enabled by default, and can be enabled via a
+ * system property as shown below:
+ *
+ *        'debug.codec2.bqpool_dealloc_after_stop' = 1
+ *
+ * Configuring the debug flag will cause \::setDeferDeallocationAfterStop()
+ * to be called after the blockpool creation, which enables the deferring.
+ *
+ * After the deferring is enabled, clearing the GraphicBuffer(s) is delayed until
+ *  1) \::clearDeferredBlocks() is called
+ *        (typically after the HAL processes a stop() request), or
+ *  2) a new ::fetchGraphicBlock() is called.
+ *
+ *  Since the deferring delays deallocation, it results in higher memory
+ *  consumption for a brief period.
+ */
 class C2BufferQueueBlockPool : public C2BlockPool {
 public:
     C2BufferQueueBlockPool(const std::shared_ptr<C2Allocator> &allocator, const local_id_t localId);
@@ -77,6 +165,8 @@
      * is configured as nullptr, unique id which is bundled in native_handle is zero.
      *
      * \param producer      the IGBP, which will be used to fetch blocks
+     *                      This may be null, in which case this blockpool will
+     *                      allocate backing GraphicBuffers via the allocator (gralloc).
      */
     virtual void configureProducer(const android::sp<HGraphicBufferProducer> &producer);
 
@@ -89,6 +179,8 @@
      * is configured as nullptr, unique id which is bundled in native_handle is zero.
      *
      * \param producer      the IGBP, which will be used to fetch blocks
+     *                      This may be null, in which case this blockpool will
+     *                      allocate backing GraphicBuffers via the allocator (gralloc).
      * \param syncMemory    Shared memory for synchronization of allocation & deallocation.
      * \param bqId          Id of IGBP
      * \param generationId  Generation Id for rendering output
@@ -110,6 +202,26 @@
      */
     virtual void invalidate();
 
+    /**
+     * Defer deallocation of cached blocks.
+     *
+     * Deallocation of cached blocks will be deferred until
+     * \clearDeferredBlocks() is called or a new block allocation is
+     * requested by \fetchGraphicBlock().
+     */
+    void setDeferDeallocationAfterStop();
+
+
+    /**
+     * Clear deferred blocks.
+     *
+     * Deallocation of cached blocks can be deferred by
+     * \setDeferDeallocationAfterStop().
+     * This call clears (deallocates) those deferred cached blocks explicitly.
+     * Use this interface if the blockpool could be inactive indefinitely.
+     */
+    void clearDeferredBlocks();
+
 private:
     const std::shared_ptr<C2Allocator> mAllocator;
     const local_id_t mLocalId;
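
A minimal usage sketch of the two methods introduced above, assuming a pool obtained from the platform block pool cache and hypothetical call sites in a HAL stop path:

    #include <C2BqBufferPriv.h>
    #include <memory>

    // Called right after pool creation when the debug flag is set, mirroring what
    // the block pool cache does in _createBlockPool().
    void enableDeferral(const std::shared_ptr<C2BufferQueueBlockPool> &pool) {
        pool->setDeferDeallocationAfterStop();
    }

    // Deferred GraphicBuffers are released only on the next fetchGraphicBlock() or
    // on an explicit clear, so clear them here if the pool may stay inactive
    // indefinitely after stop().
    void onComponentStopped(const std::shared_ptr<C2BufferQueueBlockPool> &pool) {
        pool->clearDeferredBlocks();
    }
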
diff --git a/media/codec2/vndk/include/C2FenceFactory.h b/media/codec2/vndk/include/C2FenceFactory.h
index 4f974ca..cabd5d9 100644
--- a/media/codec2/vndk/include/C2FenceFactory.h
+++ b/media/codec2/vndk/include/C2FenceFactory.h
@@ -23,13 +23,19 @@
 #include <android-base/unique_fd.h>
 
 /*
- * Create a list of fds from fence
+ * Extract a list of sync fence fds from a potentially multi-sync C2Fence.
+ * This will return dupped file descriptors of the fences used to create the
+ * sync fence. Specifically, for an unordered multi-sync fence, the merged
+ * singular fence will not be returned even though it is created as part of
+ * constructing the C2Fence object. On the other hand, for a single fd sync
+ * fence, the returned list will contain the sole file descriptor.
  *
  * \param fence   C2Fence object from which associated
  *                file descriptors need to be extracted
- * \return a vector of fds otherwise return vector of size 0
+ * \return a vector of sync fence fds. This will be a vector of size 0 if C2Fence
+ *         is not a sync fence. The caller is responsible for closing the
+ *         fds in the returned vector.
  */
-
 std::vector<int> ExtractFdsFromCodec2SyncFence(const C2Fence& fence);
 
 class C2SurfaceSyncMemory;
@@ -54,20 +60,76 @@
             uint32_t waitId);
 
     /*
-     * Create C2Fence from a fence file fd.
+     * Create C2Fence from a sync fence fd.
      *
-     * \param fenceFd           Fence file descriptor.
+     * \param fenceFd           Sync fence file descriptor.
      *                          It will be owned and closed by the returned fence object.
+     * \param validate          If true, the fence fd will be validated to ensure
+     *                          it is a valid pending sync fence fd.
      */
-    static C2Fence CreateSyncFence(int fenceFd);
+    static C2Fence CreateSyncFence(int fenceFd, bool validate = true);
 
     /*
-     * Create C2Fence from list of fence file fds.
+     * Create C2Fence from list of sync fence fds, while also merging them to
+     * create a singular fence, which can be used as a backward compatible sync
+     * fence.
      *
-     * \param fenceFds          Vector of file descriptor for fence.
-     *                          It will be owned and closed by the returned fence object.
+     * \param fenceFds   Vector of sync fence file descriptors.
+     *                   All file descriptors will be owned (and closed) by
+     *                   the returned fence object.
      */
-    static C2Fence CreateMultipleFdSyncFence(const std::vector<int>& fenceFds);
+    [[deprecated("Use CreateUnorderedMultiSyncFence instead.")]]
+    static C2Fence CreateMultipleFdSyncFence(const std::vector<int>& fenceFds) {
+        return CreateUnorderedMultiSyncFence(fenceFds);
+    }
+
+    /*
+     * Create C2Fence from a list of unordered sync fence fds, while also merging
+     * them to create a singular fence, which can be used as a backward compatible
+     * sync fence.
+     *
+     * \param fenceFds   Vector of sync fence file descriptors.
+     *                   All file descriptors will be owned (and closed) by
+     *                   the returned fence object.
+     * \param status     Optional pointer to a status field. If not null, it will be
+     *                   updated with the status of the operation. Possible values
+     *                   are:
+     *                   - C2_OK: The operation succeeded.
+     *                   - C2_NO_MEMORY: The operation failed because of lack of
+     *                     memory.
+     *                   - C2_CORRUPTED: The operation failed because the sync
+     *                     fence fds could not be merged.
+     * \return           A C2Fence object representing the sync fence fds, or
+     *                   an empty C2Fence if no C2Fence could be created.
+     *                   It is possible for the operation to fail but still return
+     *                   a possibly viable C2Fence object, e.g. if the merge
+     *                   operation failed only partially. Similarly, it is possible
+     *                   for the operation to succeed but still return an empty
+     *                   C2Fence object, e.g. if all fence fds were invalid.
+     */
+    static C2Fence CreateUnorderedMultiSyncFence(
+            const std::vector<int>& fenceFds, c2_status_t *status = nullptr /* nullable */);
+
+    /*
+     * Create C2Fence from a list of sync fence fds. Waiting for the last fence
+     * must guarantee that all other fences are also signaled.
+     *
+     * \param fenceFds   Vector of sync fence file descriptors.
+     *                   All file descriptors will be owned (and closed) by
+     *                   the returned fence object.
+     * \param status     Optional pointer to a status field. If not null, it will be
+     *                   updated with the status of the operation. Possible values
+     *                   are:
+     *                   - C2_OK: The operation succeeded.
+     *                   - C2_NO_MEMORY: The operation failed because of lack of
+     *                     memory.
+     * \return           A C2Fence object representing the sync fence fds, or
+     *                   an empty C2Fence if the operation failed.  It is possible
+     *                   for the operation to succeed but still return an empty
+     *                   C2Fence object, e.g. if all fence fds were invalid.
+     */
+    static C2Fence CreateMultiSyncFence(
+            const std::vector<int>& fenceFds, c2_status_t *status = nullptr /* nullable */);
 
     /*
      * Create C2Fence from an fd created by pipe()/pipe2() syscall.
@@ -97,13 +159,18 @@
 
     /*
      * Create C2Fence from a native handle.
-
+     *
      * \param handle           A native handle representing a fence
-     *                         The fd in the native handle will be duplicated, so the caller will
-     *                         still own the handle and have to close it.
+     * \param takeOwnership    If true, the native handle and the file descriptors
+     *                         within will be owned by the returned fence object.
+     *                         If false (default), the caller will still own the
+     *                         handle and its file descriptors and will have to
+     *                         close it.
+     *                         In either case the caller is responsible for
+     *                         deleting the native handle.
      */
-    static C2Fence CreateFromNativeHandle(const native_handle_t* handle);
+    static C2Fence CreateFromNativeHandle(
+            const native_handle_t* handle, bool takeOwnership = false);
 };
 
-
 #endif // STAGEFRIGHT_CODEC2_FENCE_FACTORY_H_
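
The new factory methods distinguish unordered fd sets, which must be merged into a single backward compatible fence, from ordered sets where waiting on the last fd is sufficient. A hedged usage sketch, assuming the factory class is _C2FenceFactory as used elsewhere in the codec2 VNDK and using placeholder fds:

    #include <C2FenceFactory.h>
    #include <unistd.h>
    #include <vector>

    C2Fence makeFence(std::vector<int> acquireFds /* ownership passes to the fence */) {
        c2_status_t status = C2_OK;
        // Unordered fds: the factory merges them into a singular sync fence internally.
        C2Fence fence = _C2FenceFactory::CreateUnorderedMultiSyncFence(acquireFds, &status);
        if (status != C2_OK) {
            // A partially merged fence may still be viable; the caller decides whether to keep it.
        }
        // The extractor returns dupped fds which the caller owns and must close.
        for (int fd : ExtractFdsFromCodec2SyncFence(fence)) {
            ::close(fd);
        }
        return fence;
    }
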
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index 48157c8..665f9fc 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -588,11 +588,22 @@
         return C2_BAD_VALUE;
     }
 
+    void clearDeferredBlocks_l() {
+        if (mHavingDeallocationDeferred) {
+            mHavingDeallocationDeferred = false;
+            for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
+                mBuffersWithDeallocationDeferred[i].clear();
+            }
+        }
+    }
+
 public:
     Impl(const std::shared_ptr<C2Allocator> &allocator)
         : mInit(C2_OK), mProducerId(0), mGeneration(0),
           mConsumerUsage(0), mDqFailure(0), mLastDqTs(0),
-          mLastDqLogTs(0), mAllocator(allocator), mIgbpValidityToken(std::make_shared<int>(0)) {
+          mLastDqLogTs(0), mAllocator(allocator),
+          mDeferDeallocationAfterStop(false),
+          mHavingDeallocationDeferred(false), mIgbpValidityToken(std::make_shared<int>(0)) {
     }
 
     ~Impl() {
@@ -634,6 +645,7 @@
             }
         }
         if (mProducerId == 0) {
+            clearDeferredBlocks_l();
             std::shared_ptr<C2GraphicAllocation> alloc;
             c2_status_t err = mAllocator->newGraphicAllocation(
                     width, height, format, usage, &alloc);
@@ -692,6 +704,7 @@
                            uint32_t generation,
                            uint64_t usage,
                            bool bqInformation) {
+        bool toNullSurface = false;
         std::shared_ptr<C2SurfaceSyncMemory> c2SyncMem;
         if (syncHandle) {
             if (!producer) {
@@ -714,6 +727,9 @@
                 mProducerId = producerId;
                 mGeneration = bqInformation ? generation : 0;
             } else {
+                if (mProducer) {
+                    toNullSurface = true;
+                }
                 mProducer = nullptr;
                 mProducerId = 0;
                 mGeneration = 0;
@@ -760,6 +776,17 @@
                 // old buffers should not be cancelled since the associated IGBP
                 // is no longer valid.
                 mIgbpValidityToken = std::make_shared<int>(0);
+                if (mDeferDeallocationAfterStop) {
+                    if (toNullSurface) {
+                        mHavingDeallocationDeferred = true;
+                        for (int i = 0; i < NUM_BUFFER_SLOTS; ++i) {
+                            mBuffersWithDeallocationDeferred[i] = mBuffers[i];
+                        }
+                    }
+                }
+            }
+            if (!toNullSurface) {
+                clearDeferredBlocks_l();
             }
             if (mInvalidated) {
                 mIgbpValidityToken = std::make_shared<int>(0);
@@ -811,6 +838,16 @@
         }
     }
 
+    void setDeferDeallocationAfterStop() {
+        std::scoped_lock<std::mutex> lock(mMutex);
+        mDeferDeallocationAfterStop = true;
+    }
+
+    void clearDeferredBlocks() {
+        std::scoped_lock<std::mutex> lock(mMutex);
+        clearDeferredBlocks_l();
+    }
+
 private:
     friend struct C2BufferQueueBlockPoolData;
 
@@ -833,6 +870,14 @@
     sp<GraphicBuffer> mBuffers[NUM_BUFFER_SLOTS];
     std::weak_ptr<C2BufferQueueBlockPoolData> mPoolDatas[NUM_BUFFER_SLOTS];
 
+    // In order to work around b/322731059,
+    // deallocation of buffers caused by stopping the use of the current surface
+    // can be deferred until the component calls stop() or a
+    // new allocation is requested.
+    bool mDeferDeallocationAfterStop;
+    bool mHavingDeallocationDeferred;
+    sp<GraphicBuffer> mBuffersWithDeallocationDeferred[NUM_BUFFER_SLOTS];
+
     std::mutex mSyncMemMutex;
     std::shared_ptr<C2SurfaceSyncMemory> mSyncMem;
     std::shared_ptr<C2SurfaceSyncMemory> mOldMem;
@@ -1178,3 +1223,15 @@
     }
 }
 
+void C2BufferQueueBlockPool::setDeferDeallocationAfterStop() {
+    if (mImpl) {
+        mImpl->setDeferDeallocationAfterStop();
+    }
+}
+
+void C2BufferQueueBlockPool::clearDeferredBlocks() {
+    if (mImpl) {
+        mImpl->clearDeferredBlocks();
+    }
+}
+
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index ba231c1..a1551f8 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -46,9 +46,9 @@
     ],
     static_libs: [
         "aaudio-aidl-cpp",
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
-        "audio-permission-aidl-cpp",
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "av-types-aidl-cpp",
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 21321b9..e19d526 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -574,7 +574,7 @@
      * For privacy, the following usages can not be recorded: AAUDIO_VOICE_COMMUNICATION*,
      * AAUDIO_USAGE_NOTIFICATION*, AAUDIO_USAGE_ASSISTANCE* and {@link #AAUDIO_USAGE_ASSISTANT}.
      *
-     * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Build.VERSION_CODES</a>,
+     * On <a href="/reference/android/os/Build.VERSION_CODES#Q">Q</a>,
      * this means only {@link #AAUDIO_USAGE_MEDIA} and {@link #AAUDIO_USAGE_GAME} may be captured.
      *
      * See <a href="/reference/android/media/AudioAttributes.html#ALLOW_CAPTURE_BY_ALL">
diff --git a/media/libaaudio/src/client/AudioStreamInternal.cpp b/media/libaaudio/src/client/AudioStreamInternal.cpp
index b2e93f0..fa3f5a0 100644
--- a/media/libaaudio/src/client/AudioStreamInternal.cpp
+++ b/media/libaaudio/src/client/AudioStreamInternal.cpp
@@ -620,17 +620,19 @@
                                                  audio_port_handle_t *portHandle) {
     ALOGV("%s() called", __func__);
     if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
+        ALOGE("%s() getServiceHandle() is invalid", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
     aaudio_result_t result =  mServiceInterface.startClient(mServiceStreamHandleInfo,
                                                             client, attr, portHandle);
-    ALOGV("%s(%d) returning %d", __func__, *portHandle, result);
+    ALOGV("%s(), got %d, returning %d", __func__, *portHandle, result);
     return result;
 }
 
 aaudio_result_t AudioStreamInternal::stopClient(audio_port_handle_t portHandle) {
     ALOGV("%s(%d) called", __func__, portHandle);
     if (getServiceHandle() == AAUDIO_HANDLE_INVALID) {
+        ALOGE("%s(%d) getServiceHandle() is invalid", __func__, portHandle);
         return AAUDIO_ERROR_INVALID_STATE;
     }
     aaudio_result_t result = mServiceInterface.stopClient(mServiceStreamHandleInfo, portHandle);
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index 8595308..255bd0f 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -261,6 +261,11 @@
 
 void AudioStreamLegacy::onAudioDeviceUpdate(audio_io_handle_t /* audioIo */,
             audio_port_handle_t deviceId) {
+    // Check for an invalid deviceId. Why change to UNSPECIFIED?
+    if (deviceId == AAUDIO_UNSPECIFIED) {
+        ALOGE("%s(, deviceId = AAUDIO_UNSPECIFIED)! Why?", __func__);
+        return;
+    }
     // Device routing is a common source of errors and DISCONNECTS.
     // Please leave this log in place. If there is a bug then this might
     // get called after the stream has been deleted so log before we
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 58e4920..85cc9bd 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -1174,6 +1174,13 @@
     mSampleRate = rate;
     mProxy->setSampleRate(effectiveSampleRate);
 
+    mediametrics::LogItem(mMetricsId)
+            .set(AMEDIAMETRICS_PROP_EVENT, AMEDIAMETRICS_PROP_EVENT_VALUE_SETSAMPLERATE)
+            .set(AMEDIAMETRICS_PROP_PREFIX_EFFECTIVE AMEDIAMETRICS_PROP_SAMPLERATE,
+                    static_cast<int32_t>(effectiveSampleRate))
+            .set(AMEDIAMETRICS_PROP_SAMPLERATE, static_cast<int32_t>(rate))
+            .record();
+
     return NO_ERROR;
 }
 
diff --git a/media/libaudioclient/tests/Android.bp b/media/libaudioclient/tests/Android.bp
index 9c3ad44..ddf14a3 100644
--- a/media/libaudioclient/tests/Android.bp
+++ b/media/libaudioclient/tests/Android.bp
@@ -134,6 +134,9 @@
         "libaudiomanager",
         "libaudiopolicy",
     ],
+    cflags: [
+        "-Wthread-safety",
+    ],
     data: ["bbb*.raw"],
     srcs: [
         "audio_test_utils.cpp",
diff --git a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
index 0be1d7e..7f55e48 100644
--- a/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
+++ b/media/libaudioclient/tests/audio_aidl_legacy_conversion_tests.cpp
@@ -483,28 +483,8 @@
                                  AudioDeviceAddress::make<AudioDeviceAddress::Tag::alsa>(
                                          std::vector<int32_t>{1, 2}))));
 
-TEST(AnonymizedBluetoothAddressRoundTripTest, Legacy2Aidl2Legacy) {
-    const std::vector<uint8_t> sAnonymizedAidlAddress =
-            std::vector<uint8_t>{0xFD, 0xFF, 0xFF, 0xFF, 0xAB, 0xCD};
-    const std::string sAnonymizedLegacyAddress = std::string("XX:XX:XX:XX:AB:CD");
-    auto device = legacy2aidl_audio_device_AudioDevice(AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
-                                                       sAnonymizedLegacyAddress);
-    ASSERT_TRUE(device.ok());
-    ASSERT_EQ(AudioDeviceAddress::Tag::mac, device.value().address.getTag());
-    ASSERT_EQ(sAnonymizedAidlAddress, device.value().address.get<AudioDeviceAddress::mac>());
-
-    audio_devices_t legacyType;
-    std::string legacyAddress;
-    status_t status =
-            aidl2legacy_AudioDevice_audio_device(device.value(), &legacyType, &legacyAddress);
-    ASSERT_EQ(OK, status);
-    EXPECT_EQ(legacyType, AUDIO_DEVICE_OUT_BLUETOOTH_A2DP);
-    EXPECT_EQ(sAnonymizedLegacyAddress, legacyAddress);
-}
-
 class AudioFormatDescriptionRoundTripTest : public testing::TestWithParam<AudioFormatDescription> {
 };
-
 TEST_P(AudioFormatDescriptionRoundTripTest, Aidl2Legacy2Aidl) {
     const auto initial = GetParam();
     auto conv = aidl2legacy_AudioFormatDescription_audio_format_t(initial);
diff --git a/media/libaudioclient/tests/audio_test_utils.cpp b/media/libaudioclient/tests/audio_test_utils.cpp
index 9a202cc3..1599839 100644
--- a/media/libaudioclient/tests/audio_test_utils.cpp
+++ b/media/libaudioclient/tests/audio_test_utils.cpp
@@ -28,25 +28,35 @@
 
 void OnAudioDeviceUpdateNotifier::onAudioDeviceUpdate(audio_io_handle_t audioIo,
                                                       audio_port_handle_t deviceId) {
-    std::unique_lock<std::mutex> lock{mMutex};
     ALOGI("%s: audioIo=%d deviceId=%d", __func__, audioIo, deviceId);
-    mAudioIo = audioIo;
-    mDeviceId = deviceId;
+    {
+        std::lock_guard lock(mMutex);
+        mAudioIo = audioIo;
+        mDeviceId = deviceId;
+    }
     mCondition.notify_all();
 }
 
 status_t OnAudioDeviceUpdateNotifier::waitForAudioDeviceCb(audio_port_handle_t expDeviceId) {
-    std::unique_lock<std::mutex> lock{mMutex};
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
     if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
         (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
         mCondition.wait_for(lock, std::chrono::milliseconds(500));
         if (mAudioIo == AUDIO_IO_HANDLE_NONE ||
-            (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId))
+            (expDeviceId != AUDIO_PORT_HANDLE_NONE && expDeviceId != mDeviceId)) {
             return TIMED_OUT;
+        }
     }
     return OK;
 }
 
+std::pair<audio_io_handle_t, audio_port_handle_t>
+OnAudioDeviceUpdateNotifier::getLastPortAndDevice() const {
+    std::lock_guard lock(mMutex);
+    return {mAudioIo, mDeviceId};
+}
+
 AudioPlayback::AudioPlayback(uint32_t sampleRate, audio_format_t format,
                              audio_channel_mask_t channelMask, audio_output_flags_t flags,
                              audio_session_t sessionId, AudioTrack::transfer_type transferType,
@@ -147,9 +157,8 @@
 }
 
 void AudioPlayback::onBufferEnd() {
-    std::unique_lock<std::mutex> lock{mMutex};
+    std::lock_guard lock(mMutex);
     mStopPlaying = true;
-    mCondition.notify_all();
 }
 
 status_t AudioPlayback::fillBuffer() {
@@ -187,7 +196,12 @@
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
     size_t totalFrameCount = mMemCapacity / mTrack->frameSize();
-    while (!mStopPlaying && counter < maxTries) {
+    bool stopPlaying;
+    {
+        std::lock_guard lock(mMutex);
+        stopPlaying = mStopPlaying;
+    }
+    while (!stopPlaying && counter < maxTries) {
         uint32_t currPosition;
         mTrack->getPosition(&currPosition);
         if (currPosition >= totalFrameCount) counter++;
@@ -213,7 +227,10 @@
             mTrack->start();
         }
         std::this_thread::sleep_for(std::chrono::milliseconds(WAIT_PERIOD_MS));
+        std::lock_guard lock(mMutex);
+        stopPlaying = mStopPlaying;
     }
+    std::lock_guard lock(mMutex);
     if (!mStopPlaying && counter == maxTries) return TIMED_OUT;
     return OK;
 }
@@ -228,8 +245,10 @@
 }
 
 void AudioPlayback::stop() {
-    std::unique_lock<std::mutex> lock{mMutex};
-    mStopPlaying = true;
+    {
+        std::lock_guard lock(mMutex);
+        mStopPlaying = true;
+    }
     if (mState != PLAY_STOPPED && mState != PLAY_NO_INIT) {
         int32_t msec = 0;
         (void)mTrack->pendingDuration(&msec);
@@ -257,10 +276,13 @@
         return 0;
     }
 
-    // no more frames to read
-    if (mNumFramesReceived >= mNumFramesToRecord || mStopRecording) {
-        mStopRecording = true;
-        return 0;
+    {
+        std::lock_guard l(mMutex);
+        // no more frames to read
+        if (mNumFramesReceived >= mNumFramesToRecord || mStopRecording) {
+            mStopRecording = true;
+            return 0;
+        }
     }
 
     int64_t timeUs = 0, position = 0, timeNs = 0;
@@ -272,6 +294,7 @@
         ts.getBestTimestamp(&position, &timeNs, ExtendedTimestamp::TIMEBASE_MONOTONIC, &location) ==
                 OK) {
         // Use audio timestamp.
+        std::lock_guard l(mMutex);
         timeUs = timeNs / 1000 -
                  (position - mNumFramesReceived + mNumFramesLost) * usPerSec / mSampleRate;
     } else {
@@ -300,6 +323,7 @@
         } else {
             numLostBytes = 0;
         }
+        std::lock_guard l(mMutex);
         const int64_t timestampUs =
                 ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
                 mRecord->getSampleRate();
@@ -313,6 +337,7 @@
     if (buffer.size() == 0) {
         ALOGW("Nothing is available from AudioRecord callback buffer");
     } else {
+        std::lock_guard l(mMutex);
         const size_t bufferSize = buffer.size();
         const int64_t timestampUs =
                 ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
@@ -324,9 +349,12 @@
     }
 
     if (tmpQueue.size() > 0) {
-        std::unique_lock<std::mutex> lock{mMutex};
-        for (auto it = tmpQueue.begin(); it != tmpQueue.end(); it++)
-            mBuffersReceived.push_back(std::move(*it));
+        {
+            std::lock_guard lock(mMutex);
+            mBuffersReceived.insert(mBuffersReceived.end(),
+                                    std::make_move_iterator(tmpQueue.begin()),
+                                    std::make_move_iterator(tmpQueue.end()));
+        }
         mCondition.notify_all();
     }
     return buffer.size();
@@ -334,17 +362,24 @@
 
 void AudioCapture::onOverrun() {
     ALOGV("received event overrun");
-    mBufferOverrun = true;
 }
 
 void AudioCapture::onMarker(uint32_t markerPosition) {
     ALOGV("received Callback at position %d", markerPosition);
-    mReceivedCbMarkerAtPosition = markerPosition;
+    {
+        std::lock_guard l(mMutex);
+        mReceivedCbMarkerAtPosition = markerPosition;
+    }
+    mMarkerCondition.notify_all();
 }
 
 void AudioCapture::onNewPos(uint32_t markerPosition) {
     ALOGV("received Callback at position %d", markerPosition);
-    mReceivedCbMarkerCount++;
+    {
+        std::lock_guard l(mMutex);
+        mReceivedCbMarkerCount = mReceivedCbMarkerCount.value_or(0) + 1;
+    }
+    mMarkerCondition.notify_all();
 }
 
 void AudioCapture::onNewIAudioRecord() {
@@ -362,20 +397,7 @@
       mFlags(flags),
       mSessionId(sessionId),
       mTransferType(transferType),
-      mAttributes(attributes) {
-    mFrameCount = 0;
-    mNotificationFrames = 0;
-    mNumFramesToRecord = 0;
-    mNumFramesReceived = 0;
-    mNumFramesLost = 0;
-    mBufferOverrun = false;
-    mMarkerPosition = 0;
-    mMarkerPeriod = 0;
-    mReceivedCbMarkerAtPosition = -1;
-    mReceivedCbMarkerCount = 0;
-    mState = REC_NO_INIT;
-    mStopRecording = false;
-}
+      mAttributes(attributes) {}
 
 AudioCapture::~AudioCapture() {
     if (mOutFileFd > 0) close(mOutFileFd);
@@ -484,7 +506,10 @@
 
 status_t AudioCapture::stop() {
     status_t status = OK;
-    mStopRecording = true;
+    {
+        std::lock_guard l(mMutex);
+        mStopRecording = true;
+    }
     if (mState != REC_STOPPED && mState != REC_NO_INIT) {
         if (mInputSource != AUDIO_SOURCE_DEFAULT) {
             bool state = false;
@@ -503,25 +528,32 @@
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
     size_t nonContig = 0;
-    while (mNumFramesReceived < mNumFramesToRecord) {
+    int64_t numFramesReceived;
+    {
+        std::lock_guard l(mMutex);
+        numFramesReceived = mNumFramesReceived;
+    }
+    while (numFramesReceived < mNumFramesToRecord) {
         AudioRecord::Buffer recordBuffer;
         recordBuffer.frameCount = mNotificationFrames;
         status_t status = mRecord->obtainBuffer(&recordBuffer, 1, &nonContig);
         if (OK == status) {
             const int64_t timestampUs =
-                    ((1000000LL * mNumFramesReceived) + (mRecord->getSampleRate() >> 1)) /
+                    ((1000000LL * numFramesReceived) + (mRecord->getSampleRate() >> 1)) /
                     mRecord->getSampleRate();
             RawBuffer buff{-1, timestampUs, static_cast<int32_t>(recordBuffer.size())};
             memcpy(buff.mData.get(), recordBuffer.data(), recordBuffer.size());
             buffer = std::move(buff);
-            mNumFramesReceived += recordBuffer.size() / mRecord->frameSize();
+            numFramesReceived += recordBuffer.size() / mRecord->frameSize();
             mRecord->releaseBuffer(&recordBuffer);
             counter = 0;
         } else if (WOULD_BLOCK == status) {
             // if not received a buffer for MAX_WAIT_TIME_MS, something has gone wrong
-            if (counter == maxTries) return TIMED_OUT;
-            counter++;
+            if (counter++ == maxTries) status = TIMED_OUT;
         }
+        std::lock_guard l(mMutex);
+        mNumFramesReceived = numFramesReceived;
+        if (TIMED_OUT == status) return status;
     }
     return OK;
 }
@@ -530,7 +562,8 @@
     if (REC_STARTED != mState) return INVALID_OPERATION;
     const int maxTries = MAX_WAIT_TIME_MS / WAIT_PERIOD_MS;
     int counter = 0;
-    std::unique_lock<std::mutex> lock{mMutex};
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
     while (mBuffersReceived.empty() && !mStopRecording && counter < maxTries) {
         mCondition.wait_for(lock, std::chrono::milliseconds(WAIT_PERIOD_MS));
         counter++;
@@ -548,7 +581,12 @@
 status_t AudioCapture::audioProcess() {
     RawBuffer buffer;
     status_t status = OK;
-    while (mNumFramesReceived < mNumFramesToRecord && status == OK) {
+    int64_t numFramesReceived;
+    {
+        std::lock_guard l(mMutex);
+        numFramesReceived = mNumFramesReceived;
+    }
+    while (numFramesReceived < mNumFramesToRecord && status == OK) {
         if (mTransferType == AudioRecord::TRANSFER_CALLBACK)
             status = obtainBufferCb(buffer);
         else
@@ -557,10 +595,52 @@
             const char* ptr = static_cast<const char*>(static_cast<void*>(buffer.mData.get()));
             write(mOutFileFd, ptr, buffer.mCapacity);
         }
+        std::lock_guard l(mMutex);
+        numFramesReceived = mNumFramesReceived;
     }
     return OK;
 }
 
+uint32_t AudioCapture::getMarkerPeriod() const {
+    std::lock_guard l(mMutex);
+    return mMarkerPeriod;
+}
+
+uint32_t AudioCapture::getMarkerPosition() const {
+    std::lock_guard l(mMutex);
+    return mMarkerPosition;
+}
+
+void AudioCapture::setMarkerPeriod(uint32_t markerPeriod) {
+    std::lock_guard l(mMutex);
+    mMarkerPeriod = markerPeriod;
+}
+
+void AudioCapture::setMarkerPosition(uint32_t markerPosition) {
+    std::lock_guard l(mMutex);
+    mMarkerPosition = markerPosition;
+}
+
+uint32_t AudioCapture::waitAndGetReceivedCbMarkerAtPosition() const {
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
+        android::base::ScopedLockAssertion lock_assertion(mMutex);
+        return mReceivedCbMarkerAtPosition.has_value();
+    });
+    return mReceivedCbMarkerAtPosition.value_or(~0);
+}
+
+uint32_t AudioCapture::waitAndGetReceivedCbMarkerCount() const {
+    std::unique_lock lock(mMutex);
+    android::base::ScopedLockAssertion lock_assertion(mMutex);
+    mMarkerCondition.wait_for(lock, std::chrono::seconds(3), [this]() {
+        android::base::ScopedLockAssertion lock_assertion(mMutex);
+        return mReceivedCbMarkerCount.has_value();
+    });
+    return mReceivedCbMarkerCount.value_or(0);
+}
+
 status_t listAudioPorts(std::vector<audio_port_v7>& portsVec) {
     int attempts = 5;
     status_t status;
diff --git a/media/libaudioclient/tests/audio_test_utils.h b/media/libaudioclient/tests/audio_test_utils.h
index 76e4642..022ecf3 100644
--- a/media/libaudioclient/tests/audio_test_utils.h
+++ b/media/libaudioclient/tests/audio_test_utils.h
@@ -19,14 +19,13 @@
 
 #include <sys/stat.h>
 #include <unistd.h>
-#include <atomic>
-#include <chrono>
-#include <cinttypes>
 #include <deque>
 #include <memory>
 #include <mutex>
 #include <thread>
+#include <utility>
 
+#include <android-base/thread_annotations.h>
 #include <binder/MemoryDealer.h>
 #include <media/AidlConversion.h>
 #include <media/AudioRecord.h>
@@ -63,13 +62,15 @@
 
 class OnAudioDeviceUpdateNotifier : public AudioSystem::AudioDeviceCallback {
   public:
-    audio_io_handle_t mAudioIo = AUDIO_IO_HANDLE_NONE;
-    audio_port_handle_t mDeviceId = AUDIO_PORT_HANDLE_NONE;
-    std::mutex mMutex;
-    std::condition_variable mCondition;
-
-    void onAudioDeviceUpdate(audio_io_handle_t audioIo, audio_port_handle_t deviceId);
+    void onAudioDeviceUpdate(audio_io_handle_t audioIo, audio_port_handle_t deviceId) override;
     status_t waitForAudioDeviceCb(audio_port_handle_t expDeviceId = AUDIO_PORT_HANDLE_NONE);
+    std::pair<audio_io_handle_t, audio_port_handle_t> getLastPortAndDevice() const;
+
+  private:
+    audio_io_handle_t mAudioIo GUARDED_BY(mMutex) = AUDIO_IO_HANDLE_NONE;
+    audio_port_handle_t mDeviceId GUARDED_BY(mMutex) = AUDIO_PORT_HANDLE_NONE;
+    mutable std::mutex mMutex;
+    std::condition_variable mCondition;
 };
 
 // Simple AudioPlayback class.
@@ -86,15 +87,14 @@
     status_t create();
     sp<AudioTrack> getAudioTrackHandle();
     status_t start();
-    status_t waitForConsumption(bool testSeek = false);
+    status_t waitForConsumption(bool testSeek = false) EXCLUDES(mMutex);
     status_t fillBuffer();
     status_t onProcess(bool testSeek = false);
-    virtual void onBufferEnd() override;
-    void stop();
+    void onBufferEnd() override EXCLUDES(mMutex);
+    void stop() EXCLUDES(mMutex);
 
-    bool mStopPlaying;
-    std::mutex mMutex;
-    std::condition_variable mCondition;
+    bool mStopPlaying GUARDED_BY(mMutex);
+    mutable std::mutex mMutex;
 
     enum State {
         PLAY_NO_INIT,
@@ -144,10 +144,10 @@
                  AudioRecord::transfer_type transferType = AudioRecord::TRANSFER_CALLBACK,
                  const audio_attributes_t* attributes = nullptr);
     ~AudioCapture();
-    size_t onMoreData(const AudioRecord::Buffer& buffer) override;
+    size_t onMoreData(const AudioRecord::Buffer& buffer) override EXCLUDES(mMutex);
     void onOverrun() override;
-    void onMarker(uint32_t markerPosition) override;
-    void onNewPos(uint32_t newPos) override;
+    void onMarker(uint32_t markerPosition) override EXCLUDES(mMutex);
+    void onNewPos(uint32_t newPos) override EXCLUDES(mMutex);
     void onNewIAudioRecord() override;
     status_t create();
     status_t setRecordDuration(float durationInSec);
@@ -156,21 +156,20 @@
     sp<AudioRecord> getAudioRecordHandle();
     status_t start(AudioSystem::sync_event_t event = AudioSystem::SYNC_EVENT_NONE,
                    audio_session_t triggerSession = AUDIO_SESSION_NONE);
-    status_t obtainBufferCb(RawBuffer& buffer);
-    status_t obtainBuffer(RawBuffer& buffer);
-    status_t audioProcess();
-    status_t stop();
+    status_t obtainBufferCb(RawBuffer& buffer) EXCLUDES(mMutex);
+    status_t obtainBuffer(RawBuffer& buffer) EXCLUDES(mMutex);
+    status_t audioProcess() EXCLUDES(mMutex);
+    status_t stop() EXCLUDES(mMutex);
+    uint32_t getMarkerPeriod() const EXCLUDES(mMutex);
+    uint32_t getMarkerPosition() const EXCLUDES(mMutex);
+    void setMarkerPeriod(uint32_t markerPeriod) EXCLUDES(mMutex);
+    void setMarkerPosition(uint32_t markerPosition) EXCLUDES(mMutex);
+    uint32_t waitAndGetReceivedCbMarkerAtPosition() const EXCLUDES(mMutex);
+    uint32_t waitAndGetReceivedCbMarkerCount() const EXCLUDES(mMutex);
 
-    uint32_t mFrameCount;
-    uint32_t mNotificationFrames;
-    int64_t mNumFramesToRecord;
-    int64_t mNumFramesReceived;
-    int64_t mNumFramesLost;
-    uint32_t mMarkerPosition;
-    uint32_t mMarkerPeriod;
-    uint32_t mReceivedCbMarkerAtPosition;
-    uint32_t mReceivedCbMarkerCount;
-    bool mBufferOverrun;
+    uint32_t mFrameCount = 0;
+    uint32_t mNotificationFrames = 0;
+    int64_t mNumFramesToRecord = 0;
 
     enum State {
         REC_NO_INIT,
@@ -191,14 +190,23 @@
 
     size_t mMaxBytesPerCallback = 2048;
     sp<AudioRecord> mRecord;
-    State mState;
-    bool mStopRecording;
+    State mState = REC_NO_INIT;
+    bool mStopRecording GUARDED_BY(mMutex) = false;
     std::string mFileName;
     int mOutFileFd = -1;
 
-    std::mutex mMutex;
+    mutable std::mutex mMutex;
     std::condition_variable mCondition;
-    std::deque<RawBuffer> mBuffersReceived;
+    std::deque<RawBuffer> mBuffersReceived GUARDED_BY(mMutex);
+
+    mutable std::condition_variable mMarkerCondition;
+    uint32_t mMarkerPeriod GUARDED_BY(mMutex) = 0;
+    uint32_t mMarkerPosition GUARDED_BY(mMutex) = 0;
+    std::optional<uint32_t> mReceivedCbMarkerCount GUARDED_BY(mMutex);
+    std::optional<uint32_t> mReceivedCbMarkerAtPosition GUARDED_BY(mMutex);
+
+    int64_t mNumFramesReceived GUARDED_BY(mMutex) = 0;
+    int64_t mNumFramesLost GUARDED_BY(mMutex) = 0;
 };
 
 #endif  // AUDIO_TEST_UTILS_H_
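
The header now relies on the clang thread-safety annotations (GUARDED_BY, EXCLUDES) that the -Wthread-safety cflag added to the test Android.bp enforces, together with android::base::ScopedLockAssertion to satisfy the analysis inside condition-variable waits. A minimal, self-contained sketch of the same pattern (the class and members here are illustrative, not from the patch):

    #include <android-base/thread_annotations.h>
    #include <condition_variable>
    #include <mutex>

    class Counter {
      public:
        void increment() EXCLUDES(mMutex) {
            {
                std::lock_guard lock(mMutex);
                mValue++;  // the analysis can prove mMutex is held for this write
            }
            mCondition.notify_all();
        }

        int waitForNonZero() EXCLUDES(mMutex) {
            std::unique_lock lock(mMutex);
            // Tells the analysis that mMutex is held, since it cannot see
            // through std::unique_lock.
            android::base::ScopedLockAssertion assertion(mMutex);
            mCondition.wait(lock, [this]() {
                android::base::ScopedLockAssertion assertion(mMutex);
                return mValue != 0;
            });
            return mValue;
        }

      private:
        mutable std::mutex mMutex;
        std::condition_variable mCondition;
        int mValue GUARDED_BY(mMutex) = 0;
    };
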
diff --git a/media/libaudioclient/tests/audiorecord_tests.cpp b/media/libaudioclient/tests/audiorecord_tests.cpp
index be6c581..f2fee8b 100644
--- a/media/libaudioclient/tests/audiorecord_tests.cpp
+++ b/media/libaudioclient/tests/audiorecord_tests.cpp
@@ -102,7 +102,10 @@
     }
 
     void TearDown() override {
-        if (mAC) ASSERT_EQ(OK, mAC->stop());
+        if (mAC) {
+            ASSERT_EQ(OK, mAC->stop());
+            mAC.clear();
+        }
     }
 };
 
@@ -120,10 +123,12 @@
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->addAudioDeviceCallback(cb));
     EXPECT_EQ(OK, mAC->start()) << "record creation failed";
     EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, cbOld->mAudioIo);
-    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, cbOld->mDeviceId);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, cb->mAudioIo);
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, cb->mDeviceId);
+    const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
+    const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
     EXPECT_EQ(BAD_VALUE, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(nullptr));
     EXPECT_EQ(INVALID_OPERATION, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cbOld));
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->removeAudioDeviceCallback(cb));
@@ -166,31 +171,33 @@
 }
 
 TEST_F(AudioRecordTest, TestGetSetMarker) {
-    mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
-    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition))
+    mAC->setMarkerPosition((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setMarkerPosition(mAC->getMarkerPosition()))
             << "setMarkerPosition() failed";
     uint32_t marker;
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker))
             << "getMarkerPosition() failed";
     EXPECT_EQ(OK, mAC->start()) << "start recording failed";
     EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
-    EXPECT_EQ(marker, mAC->mMarkerPosition)
+    EXPECT_EQ(marker, mAC->getMarkerPosition())
             << "configured marker and received marker are different";
-    EXPECT_EQ(mAC->mReceivedCbMarkerAtPosition, mAC->mMarkerPosition)
+    EXPECT_EQ(mAC->waitAndGetReceivedCbMarkerAtPosition(), mAC->getMarkerPosition())
             << "configured marker and received cb marker are different";
 }
 
 TEST_F(AudioRecordTest, TestGetSetMarkerPeriodical) {
-    mAC->mMarkerPeriod = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
-    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPeriod))
+    mAC->setMarkerPeriod((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
+    EXPECT_EQ(OK, mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->getMarkerPeriod()))
             << "setPositionUpdatePeriod() failed";
     uint32_t marker;
     EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker))
             << "getPositionUpdatePeriod() failed";
     EXPECT_EQ(OK, mAC->start()) << "start recording failed";
     EXPECT_EQ(OK, mAC->audioProcess()) << "audioProcess failed";
-    EXPECT_EQ(marker, mAC->mMarkerPeriod) << "configured marker and received marker are different";
-    EXPECT_EQ(mAC->mReceivedCbMarkerCount, mAC->mNumFramesToRecord / mAC->mMarkerPeriod)
+    EXPECT_EQ(marker, mAC->getMarkerPeriod())
+            << "configured marker and received marker are different";
+    EXPECT_EQ(mAC->waitAndGetReceivedCbMarkerCount(),
+              mAC->mNumFramesToRecord / mAC->getMarkerPeriod())
             << "configured marker and received cb marker are different";
 }
 
@@ -217,12 +224,12 @@
         EXPECT_EQ(mSessionId, mAC->getAudioRecordHandle()->getSessionId());
     if (mTransferType != AudioRecord::TRANSFER_CALLBACK) {
         uint32_t marker;
-        mAC->mMarkerPosition = (mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1);
+        mAC->setMarkerPosition((mAC->mNotificationFrames << 3) + (mAC->mNotificationFrames >> 1));
         EXPECT_EQ(INVALID_OPERATION,
-                  mAC->getAudioRecordHandle()->setMarkerPosition(mAC->mMarkerPosition));
+                  mAC->getAudioRecordHandle()->setMarkerPosition(mAC->getMarkerPosition()));
         EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getMarkerPosition(&marker));
         EXPECT_EQ(INVALID_OPERATION,
-                  mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->mMarkerPosition));
+                  mAC->getAudioRecordHandle()->setPositionUpdatePeriod(mAC->getMarkerPosition()));
         EXPECT_EQ(OK, mAC->getAudioRecordHandle()->getPositionUpdatePeriod(&marker));
     }
     EXPECT_EQ(OK, mAC->start()) << "start recording failed";
diff --git a/media/libaudioclient/tests/audiorouting_tests.cpp b/media/libaudioclient/tests/audiorouting_tests.cpp
index 3b2285e..8151d39 100644
--- a/media/libaudioclient/tests/audiorouting_tests.cpp
+++ b/media/libaudioclient/tests/audiorouting_tests.cpp
@@ -64,16 +64,17 @@
         EXPECT_EQ(OK, ap->start()) << "audio track start failed";
         EXPECT_EQ(OK, ap->onProcess());
         EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-        EXPECT_TRUE(checkPatchPlayback(cb->mAudioIo, cb->mDeviceId));
+        const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+        EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
         EXPECT_NE(0, ap->getAudioTrackHandle()->getFlags() & output_flags[i]);
         audio_patch patch;
-        EXPECT_EQ(OK, getPatchForOutputMix(cb->mAudioIo, patch));
+        EXPECT_EQ(OK, getPatchForOutputMix(audioIo, patch));
         if (output_flags[i] != AUDIO_OUTPUT_FLAG_FAST) {
             // A "normal" output can still have a FastMixer, depending on the buffer size.
             // Thus, a fast track can be created on a mix port which does not have the FAST flag.
             for (auto j = 0; j < patch.num_sources; j++) {
                 if (patch.sources[j].type == AUDIO_PORT_TYPE_MIX &&
-                    patch.sources[j].ext.mix.handle == cb->mAudioIo) {
+                    patch.sources[j].ext.mix.handle == audioIo) {
                     SCOPED_TRACE(dumpPortConfig(patch.sources[j]));
                     EXPECT_NE(0, patch.sources[j].flags.output & output_flags[i])
                             << "expected output flag "
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index 03c15f4..db998cd 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -108,30 +108,32 @@
 // UNIT TESTS
 TEST_F(AudioSystemTest, CheckServerSideValues) {
     ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    EXPECT_GT(mAF->sampleRate(mCbPlayback->mAudioIo), 0);
-    EXPECT_NE(mAF->format(mCbPlayback->mAudioIo), AUDIO_FORMAT_INVALID);
-    EXPECT_GT(mAF->frameCount(mCbPlayback->mAudioIo), 0);
+    const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevice();
+    EXPECT_GT(mAF->sampleRate(pbAudioIo), 0);
+    EXPECT_NE(mAF->format(pbAudioIo), AUDIO_FORMAT_INVALID);
+    EXPECT_GT(mAF->frameCount(pbAudioIo), 0);
     size_t frameCountHal, frameCountHalCache;
-    frameCountHal = mAF->frameCountHAL(mCbPlayback->mAudioIo);
+    frameCountHal = mAF->frameCountHAL(pbAudioIo);
     EXPECT_GT(frameCountHal, 0);
-    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(mCbPlayback->mAudioIo, &frameCountHalCache));
+    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(pbAudioIo, &frameCountHalCache));
     EXPECT_EQ(frameCountHal, frameCountHalCache);
-    EXPECT_GT(mAF->latency(mCbPlayback->mAudioIo), 0);
+    EXPECT_GT(mAF->latency(pbAudioIo), 0);
     // client side latency is at least server side latency
-    EXPECT_LE(mAF->latency(mCbPlayback->mAudioIo), mPlayback->getAudioTrackHandle()->latency());
+    EXPECT_LE(mAF->latency(pbAudioIo), mPlayback->getAudioTrackHandle()->latency());
 
     ASSERT_NO_FATAL_FAILURE(createRecordSession());
-    EXPECT_GT(mAF->sampleRate(mCbRecord->mAudioIo), 0);
-    // EXPECT_NE(mAF->format(mCbRecord->mAudioIo), AUDIO_FORMAT_INVALID);
-    EXPECT_GT(mAF->frameCount(mCbRecord->mAudioIo), 0);
-    EXPECT_GT(mAF->frameCountHAL(mCbRecord->mAudioIo), 0);
-    frameCountHal = mAF->frameCountHAL(mCbRecord->mAudioIo);
+    const auto [recAudioIo, __] = mCbRecord->getLastPortAndDevice();
+    EXPECT_GT(mAF->sampleRate(recAudioIo), 0);
+    // EXPECT_NE(mAF->format(recAudioIo), AUDIO_FORMAT_INVALID);
+    EXPECT_GT(mAF->frameCount(recAudioIo), 0);
+    EXPECT_GT(mAF->frameCountHAL(recAudioIo), 0);
+    frameCountHal = mAF->frameCountHAL(recAudioIo);
     EXPECT_GT(frameCountHal, 0);
-    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(mCbRecord->mAudioIo, &frameCountHalCache));
+    EXPECT_EQ(OK, AudioSystem::getFrameCountHAL(recAudioIo, &frameCountHalCache));
     EXPECT_EQ(frameCountHal, frameCountHalCache);
-    // EXPECT_GT(mAF->latency(mCbRecord->mAudioIo), 0);
+    // EXPECT_GT(mAF->latency(recAudioIo), 0);
     // client side latency is at least server side latency
-    // EXPECT_LE(mAF->latency(mCbRecord->mAudioIo), mCapture->getAudioRecordHandle()->latency());
+    // EXPECT_LE(mAF->latency(recAudioIo), mCapture->getAudioRecordHandle()->latency());
 
     EXPECT_GT(AudioSystem::getPrimaryOutputSamplingRate(), 0);  // first fast mixer sample rate
     EXPECT_GT(AudioSystem::getPrimaryOutputFrameCount(), 0);    // fast mixer frame count
@@ -200,8 +202,9 @@
 TEST_F(AudioSystemTest, GetStreamVolume) {
     ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
     float origStreamVol;
-    EXPECT_EQ(NO_ERROR, AudioSystem::getStreamVolume(AUDIO_STREAM_MUSIC, &origStreamVol,
-                                                     mCbPlayback->mAudioIo));
+    const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevice();
+    EXPECT_EQ(NO_ERROR,
+              AudioSystem::getStreamVolume(AUDIO_STREAM_MUSIC, &origStreamVol, pbAudioIo));
 }
 
 TEST_F(AudioSystemTest, GetStreamMute) {
diff --git a/media/libaudioclient/tests/audiotrack_tests.cpp b/media/libaudioclient/tests/audiotrack_tests.cpp
index cb667a0..cf7d926 100644
--- a/media/libaudioclient/tests/audiotrack_tests.cpp
+++ b/media/libaudioclient/tests/audiotrack_tests.cpp
@@ -157,18 +157,20 @@
     EXPECT_EQ(OK, ap->start()) << "audio track start failed";
     EXPECT_EQ(OK, ap->onProcess());
     EXPECT_EQ(OK, cb->waitForAudioDeviceCb());
-    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, cbOld->mAudioIo);
-    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, cbOld->mDeviceId);
-    EXPECT_NE(AUDIO_IO_HANDLE_NONE, cb->mAudioIo);
-    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, cb->mDeviceId);
-    EXPECT_EQ(cb->mAudioIo, ap->getAudioTrackHandle()->getOutput());
-    EXPECT_EQ(cb->mDeviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
+    const auto [oldAudioIo, oldDeviceId] = cbOld->getLastPortAndDevice();
+    EXPECT_EQ(AUDIO_IO_HANDLE_NONE, oldAudioIo);
+    EXPECT_EQ(AUDIO_PORT_HANDLE_NONE, oldDeviceId);
+    const auto [audioIo, deviceId] = cb->getLastPortAndDevice();
+    EXPECT_NE(AUDIO_IO_HANDLE_NONE, audioIo);
+    EXPECT_NE(AUDIO_PORT_HANDLE_NONE, deviceId);
+    EXPECT_EQ(audioIo, ap->getAudioTrackHandle()->getOutput());
+    EXPECT_EQ(deviceId, ap->getAudioTrackHandle()->getRoutedDeviceId());
     String8 keys;
     keys = ap->getAudioTrackHandle()->getParameters(keys);
     if (!keys.empty()) {
         std::cerr << "track parameters :: " << keys << std::endl;
     }
-    EXPECT_TRUE(checkPatchPlayback(cb->mAudioIo, cb->mDeviceId));
+    EXPECT_TRUE(checkPatchPlayback(audioIo, deviceId));
     EXPECT_EQ(BAD_VALUE, ap->getAudioTrackHandle()->removeAudioDeviceCallback(nullptr));
     EXPECT_EQ(INVALID_OPERATION, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cbOld));
     EXPECT_EQ(OK, ap->getAudioTrackHandle()->removeAudioDeviceCallback(cb));
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 1990858..024589b 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -460,13 +460,17 @@
     args.portConfigId = mixPortConfig.id;
     const bool isOffload = isBitPositionFlagSet(
             aidlOutputFlags, AudioOutputFlags::COMPRESS_OFFLOAD);
+    const bool isHwAvSync = isBitPositionFlagSet(
+            aidlOutputFlags, AudioOutputFlags::HW_AV_SYNC);
     std::shared_ptr<OutputStreamCallbackAidl> streamCb;
     if (isOffload) {
         streamCb = ndk::SharedRefBase::make<OutputStreamCallbackAidl>(this);
     }
     auto eventCb = ndk::SharedRefBase::make<OutputStreamEventCallbackAidl>(this);
-    if (isOffload) {
+    if (isOffload || isHwAvSync) {
         args.offloadInfo = aidlConfig.offloadInfo;
+    }
+    if (isOffload) {
         args.callback = streamCb;
     }
     args.bufferSizeFrames = aidlConfig.frameCount;
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.cpp b/media/libaudiohal/impl/Hal2AidlMapper.cpp
index cbade70..a01ac4b 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.cpp
+++ b/media/libaudiohal/impl/Hal2AidlMapper.cpp
@@ -37,6 +37,7 @@
 using aidl::android::media::audio::common::AudioDeviceType;
 using aidl::android::media::audio::common::AudioFormatDescription;
 using aidl::android::media::audio::common::AudioFormatType;
+using aidl::android::media::audio::common::AudioGainConfig;
 using aidl::android::media::audio::common::AudioInputFlags;
 using aidl::android::media::audio::common::AudioIoFlags;
 using aidl::android::media::audio::common::AudioOutputFlags;
@@ -325,8 +326,8 @@
 }
 
 status_t Hal2AidlMapper::findOrCreateDevicePortConfig(
-        const AudioDevice& device, const AudioConfig* config, AudioPortConfig* portConfig,
-        bool* created) {
+        const AudioDevice& device, const AudioConfig* config, const AudioGainConfig* gainConfig,
+        AudioPortConfig* portConfig, bool* created) {
     if (auto portConfigIt = findPortConfig(device); portConfigIt == mPortConfigs.end()) {
         auto portsIt = findPort(device);
         if (portsIt == mPorts.end()) {
@@ -339,12 +340,18 @@
         if (config != nullptr) {
             setPortConfigFromConfig(&requestedPortConfig, *config);
         }
+        if (gainConfig != nullptr) {
+            requestedPortConfig.gain = *gainConfig;
+        }
         return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
     } else {
         AudioPortConfig requestedPortConfig = portConfigIt->second;
         if (config != nullptr) {
             setPortConfigFromConfig(&requestedPortConfig, *config);
         }
+        if (gainConfig != nullptr) {
+            requestedPortConfig.gain = *gainConfig;
+        }
 
         if (requestedPortConfig != portConfigIt->second) {
             return createOrUpdatePortConfigRetry(requestedPortConfig, portConfig, created);
@@ -447,18 +454,26 @@
                 requestedPortConfig.ext.get<Tag::mix>().handle, source, destinationPortIds,
                 portConfig, created);
     } else if (requestedPortConfig.ext.getTag() == Tag::device) {
-        if (const auto& p = requestedPortConfig;
-                p.sampleRate.has_value() && p.channelMask.has_value() &&
-                p.format.has_value()) {
-            AudioConfig config;
-            setConfigFromPortConfig(&config, requestedPortConfig);
+        const auto& p = requestedPortConfig;
+        const bool hasAudioConfig =
+                p.sampleRate.has_value() && p.channelMask.has_value() && p.format.has_value();
+        const bool hasGainConfig = p.gain.has_value();
+        if (hasAudioConfig || hasGainConfig) {
+            AudioConfig config, *configPtr = nullptr;
+            if (hasAudioConfig) {
+                setConfigFromPortConfig(&config, requestedPortConfig);
+                configPtr = &config;
+            }
+            const AudioGainConfig* gainConfigPtr = nullptr;
+            if (hasGainConfig) gainConfigPtr = &(*(p.gain));
             return findOrCreateDevicePortConfig(
-                    requestedPortConfig.ext.get<Tag::device>().device, &config,
+                    requestedPortConfig.ext.get<Tag::device>().device, configPtr, gainConfigPtr,
                     portConfig, created);
         } else {
+            ALOGD("%s: device port config does not have audio or gain config specified", __func__);
             return findOrCreateDevicePortConfig(
                     requestedPortConfig.ext.get<Tag::device>().device, nullptr /*config*/,
-                    portConfig, created);
+                    nullptr /*gainConfig*/, portConfig, created);
         }
     }
     ALOGW("%s: unsupported audio port config: %s",
@@ -769,7 +784,7 @@
     // then find / create a patch between them, and open a stream on the mix port.
     AudioPortConfig devicePortConfig;
     bool created = false;
-    RETURN_STATUS_IF_ERROR(findOrCreateDevicePortConfig(device, config,
+    RETURN_STATUS_IF_ERROR(findOrCreateDevicePortConfig(device, config, nullptr /*gainConfig*/,
                     &devicePortConfig, &created));
     LOG_ALWAYS_FATAL_IF(devicePortConfig.id == 0);
     if (created) {
@@ -914,6 +929,7 @@
                 !status.isOk()) {
             ALOGE("%s: error while resetting port config %d: %s",
                     __func__, portConfigId, status.getDescription().c_str());
+            return;
         }
         mPortConfigs.erase(it);
         return;
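
With the gainConfig parameter threaded through findOrCreateDevicePortConfig(), a device port config that carries only a gain (and no sample rate, format, or channel mask) is no longer treated as empty. A hedged sketch of the shape such a config takes before it reaches the mapper (field values and the helper name are placeholders, not from the patch):

    using ::aidl::android::media::audio::common::AudioGainConfig;
    using ::aidl::android::media::audio::common::AudioPortConfig;

    AudioPortConfig makeGainOnlyDevicePortConfig(int32_t devicePortId,
                                                 const AudioGainConfig &gain) {
        AudioPortConfig config;
        config.portId = devicePortId;
        // Only the gain is populated; with the change above this still reaches
        // findOrCreateDevicePortConfig() via the hasGainConfig path.
        config.gain = gain;
        // config.ext must also be set to the target device (omitted here).
        return config;
    }
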
diff --git a/media/libaudiohal/impl/Hal2AidlMapper.h b/media/libaudiohal/impl/Hal2AidlMapper.h
index c70c8af..710b43e 100644
--- a/media/libaudiohal/impl/Hal2AidlMapper.h
+++ b/media/libaudiohal/impl/Hal2AidlMapper.h
@@ -163,6 +163,7 @@
     status_t findOrCreateDevicePortConfig(
             const ::aidl::android::media::audio::common::AudioDevice& device,
             const ::aidl::android::media::audio::common::AudioConfig* config,
+            const ::aidl::android::media::audio::common::AudioGainConfig* gainConfig,
             ::aidl::android::media::audio::common::AudioPortConfig* portConfig,
             bool* created);
     // If the resulting 'portConfig->id' is 0, that means the config was not created,
diff --git a/media/libaudiohal/impl/StreamHalAidl.cpp b/media/libaudiohal/impl/StreamHalAidl.cpp
index 2fb4756..38e1ea4 100644
--- a/media/libaudiohal/impl/StreamHalAidl.cpp
+++ b/media/libaudiohal/impl/StreamHalAidl.cpp
@@ -86,8 +86,8 @@
           mStream(stream),
           mVendorExt(vext),
           mLastReplyLifeTimeNs(
-                  std::min(static_cast<size_t>(100),
-                          2 * mContext.getBufferDurationMs(mConfig.sample_rate))
+                  std::min(static_cast<size_t>(20),
+                           mContext.getBufferDurationMs(mConfig.sample_rate))
                   * NANOS_PER_MILLISECOND)
 {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
@@ -449,6 +449,11 @@
                    state == StreamDescriptor::State::TRANSFER_PAUSED ||
                    state == StreamDescriptor::State::DRAIN_PAUSED) {
             return sendCommand(makeHalCommand<HalCommand::Tag::start>(), reply);
+        } else if (state == StreamDescriptor::State::ACTIVE ||
+                   state == StreamDescriptor::State::TRANSFERRING ||
+                   state == StreamDescriptor::State::DRAINING) {
+            ALOGD("%s: already in stream state: %s", __func__, toString(state).c_str());
+            return OK;
         } else {
             ALOGE("%s: unexpected stream state: %s (expected IDLE or one of *PAUSED states)",
                         __func__, toString(state).c_str());
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
index ca6ff88..7879200 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionEq.cpp
@@ -17,6 +17,7 @@
 #include <cstdint>
 #include <cstring>
 #include <optional>
+#include <unordered_set>
 #define LOG_TAG "AidlConversionEQ"
 //#define LOG_NDEBUG 0
 
@@ -262,10 +263,21 @@
         }
         case EQ_PARAM_GET_NUM_OF_PRESETS: {
             Parameter aidlParam = VALUE_OR_RETURN_STATUS(getAidlParameter(Equalizer::presets));
-            const auto& presets = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
+            auto presets = VALUE_OR_RETURN_STATUS(GET_PARAMETER_SPECIFIC_FIELD(
                     aidlParam, Equalizer, equalizer, Equalizer::presets,
                     std::vector<Equalizer::Preset>));
-            uint16_t num = presets.size();
+            // preset indexes are assumed to be in the range [0, NUM_OF_PRESETS - 1], so
+            // filter out presets outside this range (one example is preset {-1, "custom"})
+            std::erase_if(presets, [](const auto& preset) { return preset.index < 0; });
+            // validate that the remaining indexes are unique and within [0, num - 1]
+            std::unordered_set<uint16_t> uniqueIndices;
+            const uint16_t num = presets.size();
+            for (const auto& preset : presets) {
+                if (preset.index >= num || 0 != uniqueIndices.count(preset.index)) {
+                    return BAD_VALUE;
+                }
+                uniqueIndices.insert(preset.index);
+            }
             return param.writeToValue(&num);
         }
         case EQ_PARAM_GET_PRESET_NAME: {
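The preset filtering above assumes the surviving indexes form a unique cover of [0, num - 1]. A minimal standalone sketch of that validation, using only the standard library (Preset is a stand-in for Equalizer::Preset):

    // Standalone sketch (not part of the patch): drop negative preset indexes
    // (e.g. {-1, "custom"}), then require the rest to be unique and < num.
    #include <cstdint>
    #include <string>
    #include <unordered_set>
    #include <vector>

    struct Preset { int index; std::string name; };

    bool countValidPresets(std::vector<Preset> presets, uint16_t* num) {
        std::erase_if(presets, [](const auto& p) { return p.index < 0; });
        *num = presets.size();
        std::unordered_set<uint16_t> seen;
        for (const auto& p : presets) {
            if (p.index >= *num || !seen.insert(p.index).second) return false;
        }
        return true;
    }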
diff --git a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
index 0bd6fb0..50b748e 100644
--- a/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
+++ b/media/libaudiohal/tests/CoreAudioHalAidl_test.cpp
@@ -29,6 +29,7 @@
 #include <aidl/android/hardware/audio/core/BnModule.h>
 #include <aidl/android/hardware/audio/core/BnStreamCommon.h>
 #include <aidl/android/media/audio/BnHalAdapterVendorExtension.h>
+#include <aidl/android/media/audio/common/AudioGainMode.h>
 #include <aidl/android/media/audio/common/Int.h>
 #include <utils/Log.h>
 
@@ -44,6 +45,8 @@
 using ::aidl::android::media::audio::common::AudioDeviceType;
 using ::aidl::android::media::audio::common::AudioFormatDescription;
 using ::aidl::android::media::audio::common::AudioFormatType;
+using ::aidl::android::media::audio::common::AudioGainConfig;
+using ::aidl::android::media::audio::common::AudioGainMode;
 using ::aidl::android::media::audio::common::AudioIoFlags;
 using ::aidl::android::media::audio::common::AudioPort;
 using ::aidl::android::media::audio::common::AudioPortConfig;
@@ -179,6 +182,11 @@
     primaryInMix.profiles = standardPcmAudioProfiles;
     c.ports.push_back(primaryInMix);
 
+    AudioPort speakerOutDevice = createPort(c.nextPortId++, "Speaker", 0, false,
+                                            createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0));
+    speakerOutDevice.profiles = standardPcmAudioProfiles;
+    c.ports.push_back(speakerOutDevice);
+
     AudioPort btOutDevice =
             createPort(c.nextPortId++, "BT A2DP Out", 0, false,
                        createPortDeviceExt(AudioDeviceType::OUT_DEVICE, 0,
@@ -209,6 +217,13 @@
         getAudioPatches(&result);
         return result;
     }
+    std::optional<AudioPortConfig> getPortConfig(int32_t id) {
+        auto iter = findById<AudioPortConfig>(mConfig.portConfigs, id);
+        if (iter != mConfig.portConfigs.end()) {
+            return *iter;
+        }
+        return std::nullopt;
+    }
 
   private:
     ndk::ScopedAStatus setModuleDebug(
@@ -646,6 +661,19 @@
 }
 }  // namespace aidl::android::hardware::audio::core
 
+namespace aidl::android::media::audio::common {
+template <typename P>
+std::enable_if_t<std::is_function_v<typename mf_traits<decltype(&P::toString)>::member_type>,
+                 std::ostream&>
+operator<<(std::ostream& os, const P& p) {
+    return os << p.toString();
+}
+template <typename E>
+std::enable_if_t<std::is_enum_v<E>, std::ostream&> operator<<(std::ostream& os, const E& e) {
+    return os << toString(e);
+}
+}  // namespace aidl::android::media::audio::common
+
 using namespace android;
 
 namespace {
@@ -1214,3 +1242,55 @@
     EXPECT_EQ(std::vector<int32_t>{backMicPortConfig.id}, patchIt->sourcePortConfigIds);
     EXPECT_EQ(std::vector<int32_t>{mixPortConfig.id}, patchIt->sinkPortConfigIds);
 }
+
+TEST_F(Hal2AidlMapperTest, SetAudioPortConfigGainChangeExistingPortConfig) {
+    // First set config, then update gain.
+    AudioPortConfig speakerPortConfig;
+    speakerPortConfig.ext = createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0);
+    speakerPortConfig.channelMask = AudioChannelLayout::make<AudioChannelLayout::layoutMask>(
+            AudioChannelLayout::LAYOUT_STEREO);
+    speakerPortConfig.format =
+            AudioFormatDescription{.type = AudioFormatType::PCM, .pcm = PcmType::INT_16_BIT};
+    speakerPortConfig.sampleRate = ::aidl::android::media::audio::common::Int(48000);
+    AudioPortConfig resultingPortConfig;
+    ASSERT_EQ(OK,
+              mMapper->setPortConfig(speakerPortConfig, std::set<int32_t>(), &resultingPortConfig));
+    EXPECT_NE(0, resultingPortConfig.id);
+    EXPECT_NE(0, resultingPortConfig.portId);
+
+    AudioPortConfig gainUpdate;
+    gainUpdate.ext = createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0);
+    AudioGainConfig gainConfig{.index = -1,
+                               .mode = 1 << static_cast<int>(AudioGainMode::JOINT),
+                               .channelMask = AudioChannelLayout{},
+                               .values = std::vector<int32_t>{-3200},
+                               .rampDurationMs = 0};
+    gainUpdate.gain = gainConfig;
+    AudioPortConfig resultingGainUpdate;
+    ASSERT_EQ(OK, mMapper->setPortConfig(gainUpdate, std::set<int32_t>(), &resultingGainUpdate));
+    EXPECT_EQ(resultingPortConfig.id, resultingGainUpdate.id);
+    auto updatedPortConfig = mModule->getPortConfig(resultingGainUpdate.id);
+    ASSERT_TRUE(updatedPortConfig.has_value());
+    ASSERT_TRUE(updatedPortConfig->gain.has_value());
+    EXPECT_EQ(gainConfig, updatedPortConfig->gain);
+}
+
+TEST_F(Hal2AidlMapperTest, SetAudioPortConfigGainChangeFromScratch) {
+    // Set gain as the first operation; the HAL should suggest the rest of the configuration.
+    AudioPortConfig gainSet;
+    gainSet.ext = createPortDeviceExt(AudioDeviceType::OUT_SPEAKER, 0);
+    AudioGainConfig gainConfig{.index = -1,
+                               .mode = 1 << static_cast<int>(AudioGainMode::JOINT),
+                               .channelMask = AudioChannelLayout{},
+                               .values = std::vector<int32_t>{-3200},
+                               .rampDurationMs = 0};
+    gainSet.gain = gainConfig;
+    AudioPortConfig resultingPortConfig;
+    ASSERT_EQ(OK, mMapper->setPortConfig(gainSet, std::set<int32_t>(), &resultingPortConfig));
+    EXPECT_NE(0, resultingPortConfig.id);
+    EXPECT_NE(0, resultingPortConfig.portId);
+    auto portConfig = mModule->getPortConfig(resultingPortConfig.id);
+    ASSERT_TRUE(portConfig.has_value());
+    ASSERT_TRUE(portConfig->gain.has_value());
+    EXPECT_EQ(gainConfig, portConfig->gain);
+}
diff --git a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
index fff2feb..d5e3cf7 100644
--- a/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
+++ b/media/libeffects/lvm/wrapper/Aidl/BundleContext.cpp
@@ -185,7 +185,6 @@
     }
     RETURN_VALUE_IF(LVM_SUCCESS != LVM_SetControlParameters(mInstance, &params),
                     RetCode::ERROR_EFFECT_LIB_ERROR, "failSetControlParams");
-    mEnabled = false;
     return limitLevel();
 }
 
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 9cd0e6e..e5323a6 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -364,7 +364,6 @@
         "av-types-aidl-cpp",
         "liblog",
         "libcutils",
-        "libprocessgroup",
         "libutils",
         "libbinder",
         "libbinder_ndk",
diff --git a/media/libmedia/IMediaMetadataRetriever.cpp b/media/libmedia/IMediaMetadataRetriever.cpp
index cdb1837..ef6250f 100644
--- a/media/libmedia/IMediaMetadataRetriever.cpp
+++ b/media/libmedia/IMediaMetadataRetriever.cpp
@@ -23,7 +23,6 @@
 #include <binder/Parcel.h>
 #include <media/IMediaHTTPService.h>
 #include <media/IMediaMetadataRetriever.h>
-#include <processgroup/sched_policy.h>
 #include <utils/String8.h>
 #include <utils/KeyedVector.h>
 
diff --git a/media/libmedia/include/media/RingBuffer.h b/media/libmedia/include/media/RingBuffer.h
index 4d92d87..a08f35e 100644
--- a/media/libmedia/include/media/RingBuffer.h
+++ b/media/libmedia/include/media/RingBuffer.h
@@ -44,8 +44,14 @@
     /**
      * Forward iterator to this class.  Implements an std:forward_iterator.
      */
-    class iterator : public std::iterator<std::forward_iterator_tag, T> {
+    class iterator {
     public:
+        using iterator_category = std::forward_iterator_tag;
+        using value_type = T;
+        using difference_type = std::ptrdiff_t;
+        using pointer = T*;
+        using reference = T&;
+
         iterator(T* ptr, size_t size, size_t pos, size_t ctr);
 
         iterator& operator++();
@@ -357,5 +363,3 @@
 }; // namespace android
 
 #endif // ANDROID_SERVICE_UTILS_RING_BUFFER_H
-
-
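The iterator rewrite above tracks the C++17 deprecation (and later removal) of std::iterator: the five member type aliases are now declared directly so std::iterator_traits keeps working. A minimal standalone sketch of the same pattern:

    // Standalone sketch (not part of the patch): a forward iterator that
    // declares its own traits instead of deriving from std::iterator.
    #include <cstddef>
    #include <iterator>
    #include <type_traits>

    struct IntIter {
        using iterator_category = std::forward_iterator_tag;
        using value_type = int;
        using difference_type = std::ptrdiff_t;
        using pointer = int*;
        using reference = int&;

        int* p;
        reference operator*() const { return *p; }
        IntIter& operator++() { ++p; return *this; }
        bool operator==(const IntIter& o) const { return p == o.p; }
        bool operator!=(const IntIter& o) const { return p != o.p; }
    };

    static_assert(std::is_same_v<std::iterator_traits<IntIter>::value_type, int>);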
diff --git a/media/libmediametrics/include/MediaMetricsConstants.h b/media/libmediametrics/include/MediaMetricsConstants.h
index e60a678..f367a3e 100644
--- a/media/libmediametrics/include/MediaMetricsConstants.h
+++ b/media/libmediametrics/include/MediaMetricsConstants.h
@@ -267,6 +267,7 @@
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETLOGSESSIONID  "setLogSessionId" // AudioTrack, Record
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYBACKPARAM "setPlaybackParam" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETPLAYERIID "setPlayerIId" // AudioTrack
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSAMPLERATE "setSampleRate" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETSTARTTHRESHOLD "setStartThreshold" // AudioTrack
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOICEVOLUME   "setVoiceVolume" // AudioFlinger
 #define AMEDIAMETRICS_PROP_EVENT_VALUE_SETVOLUME  "setVolume"  // AudioTrack
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index e229844..26b8d0c 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,10 +150,15 @@
     int32_t cameraId, const std::string& clientName, uid_t clientUid, pid_t clientPid) {
 
     if (camera == 0) {
-        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+        AttributionSourceState clientAttribution;
+        clientAttribution.pid = clientPid;
+        clientAttribution.uid = clientUid;
+        clientAttribution.deviceId = kDefaultDeviceId;
+        clientAttribution.packageName = clientName;
+
+        mCamera = Camera::connect(cameraId, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                /*forceSlowJpegMode*/false);
+                /*forceSlowJpegMode*/false, clientAttribution);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
     } else {
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index e918b5e..15188b0 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -72,6 +72,9 @@
 static const int64_t kMaxMetadataSize = 0x4000000LL;   // 64MB max per-frame metadata size
 static const int64_t kMaxCttsOffsetTimeUs = 30 * 60 * 1000000LL;  // 30 minutes
 static const size_t kESDSScratchBufferSize = 10;  // kMaxAtomSize in Mpeg4Extractor 64MB
+// Allow up to 100 milliseconds, which is safely above the maximum delay observed in manual
+// testing between posting from setNextFd and handling it
+static const int64_t kFdCondWaitTimeoutNs = 100000000;
 
 static const char kMetaKey_Version[]    = "com.android.version";
 static const char kMetaKey_Manufacturer[]      = "com.android.manufacturer";
@@ -1262,9 +1265,13 @@
         return OK;
     }
 
+    // Wait for the signal only if the new file is not available.
     if (mNextFd == -1) {
-        ALOGW("No FileDescriptor for next recording");
-        return INVALID_OPERATION;
+        status_t res = mFdCond.waitRelative(mLock, kFdCondWaitTimeoutNs);
+        if (res != OK) {
+            ALOGW("No FileDescriptor for next recording");
+            return INVALID_OPERATION;
+        }
     }
 
     mSwitchPending = true;
@@ -2433,6 +2440,7 @@
         return INVALID_OPERATION;
     }
     mNextFd = dup(fd);
+    mFdCond.signal();
     return OK;
 }
 
@@ -4886,8 +4894,15 @@
             int32_t mediaTime = (mFirstSampleStartOffsetUs * mTimeScale + 5E5) / 1E6;
             int32_t firstSampleOffsetTicks =
                     (mFirstSampleStartOffsetUs * mvhdTimeScale + 5E5) / 1E6;
-            // samples before 0 don't count in for duration, hence subtract firstSampleOffsetTicks.
-            addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTime, 1, 0);
+            if (tkhdDurationTicks >= firstSampleOffsetTicks) {
+                // samples before 0 don't count in for duration, hence subtract
+                // firstSampleOffsetTicks.
+                addOneElstTableEntry(tkhdDurationTicks - firstSampleOffsetTicks, mediaTime, 1, 0);
+            } else {
+                ALOGW("The track header duration %" PRId64
+                      " is smaller than the first sample offset %" PRId64,
+                      mTrackDurationUs, mFirstSampleStartOffsetUs);
+            }
         } else {
             // Track starting at zero.
             ALOGV("No edit list entry required for this track");
diff --git a/media/libstagefright/include/media/stagefright/MPEG4Writer.h b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
index 054a4b8..ee75129 100644
--- a/media/libstagefright/include/media/stagefright/MPEG4Writer.h
+++ b/media/libstagefright/include/media/stagefright/MPEG4Writer.h
@@ -144,6 +144,7 @@
     std::mutex mFallocMutex;
     bool mPreAllocFirstTime; // Pre-allocate space for file and track headers only once per file.
     uint64_t mPrevAllTracksTotalMetaDataSizeEstimate;
+    Condition mFdCond;
 
     List<Track *> mTracks;
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
index 72785d5..4e4aa75 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecConstants.h
@@ -882,7 +882,6 @@
 inline constexpr int32_t CRYPTO_MODE_AES_CBC     = 2;
 inline constexpr int32_t CRYPTO_MODE_AES_CTR     = 1;
 inline constexpr int32_t CRYPTO_MODE_UNENCRYPTED = 0;
-inline constexpr int32_t INFO_OUTPUT_BUFFERS_CHANGED = -3;
 inline constexpr int32_t INFO_OUTPUT_FORMAT_CHANGED  = -2;
 inline constexpr int32_t INFO_TRY_AGAIN_LATER        = -1;
 inline constexpr int32_t VIDEO_SCALING_MODE_SCALE_TO_FIT               = 1;
diff --git a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
index 4183023..4ab5d10 100644
--- a/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
+++ b/media/libstagefright/omx/SoftVideoDecoderOMXComponent.cpp
@@ -619,6 +619,13 @@
                 if (!isValidOMXParam(outParams)) {
                     return OMX_ErrorBadParameter;
                 }
+                if (offsetof(DescribeHDR10PlusInfoParams, nValue) + outParams->nParamSize >
+                    outParams->nSize) {
+                    ALOGE("b/329641908: too large param size; nParamSize=%u nSize=%u",
+                          outParams->nParamSize, outParams->nSize);
+                    android_errorWriteLog(0x534e4554, "329641908");
+                    return OMX_ErrorBadParameter;
+                }
 
                 outParams->nParamSizeUsed = info->size();
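The new check above rejects a caller-declared nParamSize that would run past the end of the allocated structure. A minimal standalone sketch of that offsetof-based bound check, with Params as a hypothetical stand-in for DescribeHDR10PlusInfoParams:

    // Standalone sketch (not part of the patch): a variable-length payload must
    // fit inside the caller-allocated struct.
    #include <cstddef>
    #include <cstdint>

    struct Params {
        uint32_t nSize;       // total size allocated by the caller
        uint32_t nParamSize;  // caller-declared size of nValue[]
        uint8_t  nValue[1];   // variable-length payload
    };

    bool paramSizeFits(const Params& p) {
        return offsetof(Params, nValue) + p.nParamSize <= p.nSize;
    }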
 
diff --git a/media/module/bufferpool/2.0/AccessorImpl.cpp b/media/module/bufferpool/2.0/AccessorImpl.cpp
index 202d803..b9483bf 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.cpp
+++ b/media/module/bufferpool/2.0/AccessorImpl.cpp
@@ -17,6 +17,8 @@
 #define LOG_TAG "BufferPoolAccessor2.0"
 //#define LOG_NDEBUG 0
 
+#include <android-base/no_destructor.h>
+
 #include <sys/types.h>
 #include <stdint.h>
 #include <time.h>
@@ -147,7 +149,25 @@
 #endif
 
 static constexpr uint32_t kSeqIdMax = 0x7fffffff;
-uint32_t Accessor::Impl::sSeqId = time(nullptr) & kSeqIdMax;
+
+Accessor::Impl::ConnectionIdGenerator::ConnectionIdGenerator() {
+    mSeqId = static_cast<uint32_t>(time(nullptr) & kSeqIdMax);
+    mPid = static_cast<int32_t>(getpid());
+}
+
+ConnectionId Accessor::Impl::ConnectionIdGenerator::getConnectionId() {
+    uint32_t seq;
+    {
+        std::lock_guard<std::mutex> l(mLock);
+        seq = mSeqId;
+        if (mSeqId == kSeqIdMax) {
+            mSeqId = 0;
+        } else {
+            ++mSeqId;
+        }
+    }
+    return (int64_t)mPid << 32 | seq | kSeqIdVndkBit;
+}
 
 Accessor::Impl::Impl(
         const std::shared_ptr<BufferPoolAllocator> &allocator)
@@ -163,13 +183,14 @@
         uint32_t *pMsgId,
         const StatusDescriptor** statusDescPtr,
         const InvalidationDescriptor** invDescPtr) {
+    static ::android::base::NoDestructor<ConnectionIdGenerator> sConIdGenerator;
     sp<Connection> newConnection = new Connection();
     ResultStatus status = ResultStatus::CRITICAL_ERROR;
     {
         std::lock_guard<std::mutex> lock(mBufferPool.mMutex);
         if (newConnection) {
             int32_t pid = getpid();
-            ConnectionId id = (int64_t)pid << 32 | sSeqId | kSeqIdVndkBit;
+            ConnectionId id = sConIdGenerator->getConnectionId();
             status = mBufferPool.mObserver.open(id, statusDescPtr);
             if (status == ResultStatus::OK) {
                 newConnection->initialize(accessor, id);
@@ -179,11 +200,6 @@
                 mBufferPool.mConnectionIds.insert(id);
                 mBufferPool.mInvalidationChannel.getDesc(invDescPtr);
                 mBufferPool.mInvalidation.onConnect(id, observer);
-                if (sSeqId == kSeqIdMax) {
-                   sSeqId = 0;
-                } else {
-                    ++sSeqId;
-                }
             }
 
         }
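The ConnectionIdGenerator introduced above replaces the unsynchronized sSeqId counter. A minimal standalone sketch of the id scheme (pid in the upper 32 bits, a mutex-protected wrapping sequence in the lower bits); the kSeqIdVndkBit value here is a stand-in:

    // Standalone sketch (not part of the patch): thread-safe connection id
    // generation with a wrapping sequence number.
    #include <cstdint>
    #include <ctime>
    #include <mutex>
    #include <unistd.h>

    constexpr uint32_t kSeqIdMax = 0x7fffffff;
    constexpr uint32_t kSeqIdVndkBit = 1u << 31;  // stand-in for the real marker bit

    class ConnectionIdGenerator {
      public:
        ConnectionIdGenerator()
            : mPid(static_cast<int32_t>(getpid())),
              mSeqId(static_cast<uint32_t>(time(nullptr) & kSeqIdMax)) {}

        int64_t getConnectionId() {
            uint32_t seq;
            {
                std::lock_guard<std::mutex> l(mLock);
                seq = mSeqId;
                mSeqId = (mSeqId == kSeqIdMax) ? 0 : mSeqId + 1;
            }
            return (static_cast<int64_t>(mPid) << 32) | seq | kSeqIdVndkBit;
        }

      private:
        const int32_t mPid;
        uint32_t mSeqId;
        std::mutex mLock;
    };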
diff --git a/media/module/bufferpool/2.0/AccessorImpl.h b/media/module/bufferpool/2.0/AccessorImpl.h
index 3d39941..2366177 100644
--- a/media/module/bufferpool/2.0/AccessorImpl.h
+++ b/media/module/bufferpool/2.0/AccessorImpl.h
@@ -77,7 +77,14 @@
 private:
     // ConnectionId = pid : (timestamp_created + seqId)
     // in order to guarantee uniqueness for each connection
-    static uint32_t sSeqId;
+    struct ConnectionIdGenerator {
+        int32_t mPid;
+        uint32_t mSeqId;
+        std::mutex mLock;
+
+        ConnectionIdGenerator();
+        ConnectionId getConnectionId();
+    };
 
     const std::shared_ptr<BufferPoolAllocator> mAllocator;
 
diff --git a/media/module/bufferpool/2.0/Android.bp b/media/module/bufferpool/2.0/Android.bp
index bdab103..c40603c 100644
--- a/media/module/bufferpool/2.0/Android.bp
+++ b/media/module/bufferpool/2.0/Android.bp
@@ -21,6 +21,9 @@
     export_include_dirs: [
         "include",
     ],
+    header_libs: [
+        "libbase_headers",
+    ],
     shared_libs: [
         "libcutils",
         "libfmq",
diff --git a/media/module/foundation/tests/AData_test.cpp b/media/module/foundation/tests/AData_test.cpp
index 2628a47..87b69a6 100644
--- a/media/module/foundation/tests/AData_test.cpp
+++ b/media/module/foundation/tests/AData_test.cpp
@@ -392,7 +392,7 @@
     EXPECT_EQ(2L, _shared.use_count()); // still both u and _shared contains the object
 
     EXPECT_TRUE(u.clear());
-    EXPECT_TRUE(_shared.unique()); // now only _shared contains the object
+    EXPECT_EQ(1L, _shared.use_count()); // now only _shared contains the object
 
     EXPECT_TRUE(u.set(_constShared));
     EXPECT_EQ(2L, _constShared.use_count()); // even though it is const, we can add a use count
@@ -591,7 +591,7 @@
     EXPECT_EQ(2L, _shared.use_count()); // still both u and _shared contains the object
 
     EXPECT_TRUE(u.clear());
-    EXPECT_TRUE(_shared.unique()); // now only _shared contains the object
+    EXPECT_EQ(1L, _shared.use_count()); // now only _shared contains the object
 
     EXPECT_TRUE(u.set(_constShared));
     EXPECT_EQ(2L, _constShared.use_count()); // even though it is const, we can add a use count
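The use_count() assertions above replace shared_ptr::unique(), which was deprecated in C++17 and removed in C++20. A minimal standalone sketch of the substitution:

    // Standalone sketch (not part of the patch): C++20-safe sole-ownership check.
    #include <cassert>
    #include <memory>

    int main() {
        auto sp = std::make_shared<int>(42);
        auto other = sp;               // use_count() == 2
        other.reset();                 // drop one owner
        assert(sp.use_count() == 1);   // previously: assert(sp.unique());
        return 0;
    }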
diff --git a/media/mtp/tests/MtpFuzzer/Android.bp b/media/mtp/tests/MtpFuzzer/Android.bp
index acae06a..2e9c58b 100644
--- a/media/mtp/tests/MtpFuzzer/Android.bp
+++ b/media/mtp/tests/MtpFuzzer/Android.bp
@@ -29,7 +29,6 @@
         "liblog",
         "libutils",
     ],
-    static_libs: ["libc++fs",],
     cflags: [
         "-Wall",
         "-Wextra",
diff --git a/media/ndk/include/media/NdkMediaDataSource.h b/media/ndk/include/media/NdkMediaDataSource.h
index 197e202..def142c 100644
--- a/media/ndk/include/media/NdkMediaDataSource.h
+++ b/media/ndk/include/media/NdkMediaDataSource.h
@@ -49,16 +49,16 @@
 /*
  * AMediaDataSource's callbacks will be invoked on an implementation-defined thread
  * or thread pool. No guarantees are provided about which thread(s) will be used for
- * callbacks. For example, |close| can be invoked from a different thread than the
- * thread invoking |readAt|. As such, the Implementations of AMediaDataSource callbacks
+ * callbacks. For example, `close` can be invoked from a different thread than the
+ * thread invoking `readAt`. As such, implementations of AMediaDataSource callbacks
  * must be threadsafe.
  */
 
 /**
- * Called to request data from the given |offset|.
+ * Called to request data from the given `offset`.
  *
- * Implementations should should write up to |size| bytes into
- * |buffer|, and return the number of bytes written.
+ * Implementations should write up to `size` bytes into
+ * `buffer`, and return the number of bytes written.
  *
  * Return 0 if size is zero (thus no bytes are read).
  *
@@ -78,9 +78,9 @@
  * Called to close the data source, unblock reads, and release associated
  * resources.
  *
- * The NDK media framework guarantees that after the first |close| is
+ * The NDK media framework guarantees that after the first `close` is
  * called, no future callbacks will be invoked on the data source except
- * for |close| itself.
+ * for `close` itself.
  *
  * Closing a data source allows readAt calls that were blocked waiting
  * for I/O data to return promptly.
@@ -101,7 +101,7 @@
 
 /**
  * Called to get an estimate of the number of bytes that can be read from this data source
- * starting at |offset| without blocking for I/O.
+ * starting at `offset` without blocking for I/O.
  *
  * Return -1 when such an estimate is not possible.
  */
@@ -111,10 +111,10 @@
  * Create new media data source. Returns NULL if memory allocation
  * for the new data source object fails.
  *
- * Set the |uri| from which the data source will read,
+ * Set the `uri` from which the data source will read,
  * plus additional http headers when initiating the request.
  *
- * Headers will contain corresponding items from |key_values|
+ * Headers will contain corresponding items from `key_values`
  * in the following fashion:
  *
  * key_values[0]:key_values[1]
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 4b0192a..7bec8cf 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -47,7 +47,6 @@
 static const String16 sModifyPhoneState("android.permission.MODIFY_PHONE_STATE");
 static const String16 sModifyAudioRouting("android.permission.MODIFY_AUDIO_ROUTING");
 static const String16 sCallAudioInterception("android.permission.CALL_AUDIO_INTERCEPTION");
-static const String16 sAndroidPermissionBluetoothConnect("android.permission.BLUETOOTH_CONNECT");
 
 static String16 resolveCallingPackage(PermissionController& permissionController,
         const std::optional<String16> opPackageName, uid_t uid) {
@@ -393,48 +392,6 @@
     return NO_ERROR;
 }
 
-/**
- * Determines if the MAC address in Bluetooth device descriptors returned by APIs of
- * a native audio service (audio flinger, audio policy) must be anonymized.
- * MAC addresses returned to system server or apps with BLUETOOTH_CONNECT permission
- * are not anonymized.
- *
- * @param attributionSource The attribution source of the calling app.
- * @param caller string identifying the caller for logging.
- * @return true if the MAC addresses must be anonymized, false otherwise.
- */
-bool mustAnonymizeBluetoothAddress(
-        const AttributionSourceState& attributionSource, const String16& caller) {
-    uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
-    if (isAudioServerOrSystemServerUid(uid)) {
-        return false;
-    }
-    const std::optional<AttributionSourceState> resolvedAttributionSource =
-            resolveAttributionSource(attributionSource, DEVICE_ID_DEFAULT);
-    if (!resolvedAttributionSource.has_value()) {
-        return true;
-    }
-    permission::PermissionChecker permissionChecker;
-    return permissionChecker.checkPermissionForPreflightFromDatasource(
-            sAndroidPermissionBluetoothConnect, resolvedAttributionSource.value(), caller,
-            AppOpsManager::OP_BLUETOOTH_CONNECT)
-                != permission::PermissionChecker::PERMISSION_GRANTED;
-}
-
-/**
- * Modifies the passed MAC address string in place for consumption by unprivileged clients.
- * the string is assumed to have a valid MAC address format.
- * the anonymzation must be kept in sync with toAnonymizedAddress() in BluetoothUtils.java
- *
- * @param address input/output the char string contining the MAC address to anonymize.
- */
-void anonymizeBluetoothAddress(char *address) {
-    if (address == nullptr || strlen(address) != strlen("AA:BB:CC:DD:EE:FF")) {
-        return;
-    }
-    memcpy(address, "XX:XX:XX:XX", strlen("XX:XX:XX:XX"));
-}
-
 sp<content::pm::IPackageManagerNative> MediaPackageManager::retrievePackageManager() {
     const sp<IServiceManager> sm = defaultServiceManager();
     if (sm == nullptr) {
diff --git a/media/utils/include/mediautils/ServiceUtilities.h b/media/utils/include/mediautils/ServiceUtilities.h
index 9c02cd4..e0fabfd 100644
--- a/media/utils/include/mediautils/ServiceUtilities.h
+++ b/media/utils/include/mediautils/ServiceUtilities.h
@@ -113,10 +113,6 @@
 bool bypassInterruptionPolicyAllowed(const AttributionSourceState& attributionSource);
 bool callAudioInterceptionAllowed(const AttributionSourceState& attributionSource);
 void purgePermissionCache();
-bool mustAnonymizeBluetoothAddress(
-        const AttributionSourceState& attributionSource, const String16& caller);
-void anonymizeBluetoothAddress(char *address);
-
 int32_t getOpForSource(audio_source_t source);
 
 AttributionSourceState getCallingAttributionSource();
diff --git a/media/utils/tests/static_string_view_tests.cpp b/media/utils/tests/static_string_view_tests.cpp
index c00de68..1dd2370 100644
--- a/media/utils/tests/static_string_view_tests.cpp
+++ b/media/utils/tests/static_string_view_tests.cpp
@@ -37,14 +37,12 @@
     // const std::array<char,2> nonstatic = {'a', 'b'};
     // static_assert(can_assign<nonstatic>::value == false);
     static std::array<char, 2> nonconst = {'a', 'b'};
-    static const std::array<char, 2> nonconstexpr = {'a', 'b'};
     static constexpr std::array<int, 2> nonchar = {1, 2};
     static constexpr size_t nonarray = 2;
 
     static_assert(CanCreate<nonconst>::value == false);
     static_assert(CanCreate<nonarray>::value == false);
     static_assert(CanCreate<nonchar>::value == false);
-    static_assert(CanCreate<nonconstexpr>::value == false);
 
     static constexpr std::array<char, 2> scoped = {'a', 'b'};
     constexpr StaticStringView Ticket1 = StaticStringView::create<global>();
diff --git a/services/audioflinger/Android.bp b/services/audioflinger/Android.bp
index b763f09..2abf682 100644
--- a/services/audioflinger/Android.bp
+++ b/services/audioflinger/Android.bp
@@ -213,6 +213,7 @@
     ],
 
     static_libs: [
+        "audiopermissioncontroller",
         "libcpustats",
         "libpermission",
     ],
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 09edf68..e76ece2 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -39,13 +39,17 @@
 #include <binder/IServiceManager.h>
 #include <binder/Parcel.h>
 #include <cutils/properties.h>
+#include <com_android_media_audio.h>
 #include <com_android_media_audioserver.h>
 #include <media/AidlConversion.h>
 #include <media/AudioParameter.h>
 #include <media/AudioValidator.h>
 #include <media/IMediaLogService.h>
+#include <media/IPermissionProvider.h>
 #include <media/MediaMetricsItem.h>
+#include <media/NativePermissionController.h>
 #include <media/TypeConverter.h>
+#include <media/ValidatedAttributionSourceState.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/MemoryLeakTrackUtil.h>
 #include <mediautils/MethodStatistics.h>
@@ -81,12 +85,17 @@
 namespace android {
 
 using ::android::base::StringPrintf;
+using aidl_utils::statusTFromBinderStatus;
 using media::IEffectClient;
 using media::audio::common::AudioMMapPolicyInfo;
 using media::audio::common::AudioMMapPolicyType;
 using media::audio::common::AudioMode;
 using android::content::AttributionSourceState;
 using android::detail::AudioHalVersionInfo;
+using com::android::media::permission::INativePermissionController;
+using com::android::media::permission::IPermissionProvider;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::ValidatedAttributionSourceState;
 
 static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
         AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1);
@@ -118,6 +127,52 @@
     }
 }
 
+static error::BinderResult<ValidatedAttributionSourceState>
+validateAttributionFromContextOrTrustedCaller(AttributionSourceState attr,
+        const IPermissionProvider& provider) {
+    const auto callingUid = IPCThreadState::self()->getCallingUid();
+    // We trust the following UIDs to have appropriately validated the identities they pass down
+    if (isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+        // Legacy paths may not properly populate package name, so we attempt to handle.
+        if (!attr.packageName.has_value() || attr.packageName.value() == "") {
+            ALOGW("Trusted client %d provided attr with missing package name", callingUid);
+            attr.packageName = VALUE_OR_RETURN(provider.getPackagesForUid(callingUid))[0];
+        }
+        // Behavior change: In the case of delegation, if pid is invalid,
+        // filling it in with the callingPid will cause a mismatch between the
+        // pid and the uid in the attribution, which is error-prone.
+        // Instead, assert that the pid from a trusted source is valid
+        if (attr.pid == -1) {
+            if (callingUid != static_cast<uid_t>(attr.uid)) {
+                return error::unexpectedExceptionCode(binder::Status::EX_ILLEGAL_ARGUMENT,
+                        "validateAttribution: Invalid pid from delegating trusted source");
+            } else {
+                // Legacy handling for trusted clients which may not fill pid correctly
+                attr.pid = IPCThreadState::self()->getCallingPid();
+            }
+        }
+        return ValidatedAttributionSourceState::createFromTrustedSource(std::move(attr));
+    } else {
+        // Behavior change: Populate pid with callingPid unconditionally. Previously, we
+        // allowed caller provided pid, if uid matched calling context, but this is error-prone
+        // since it allows mismatching uid/pid
+        return ValidatedAttributionSourceState::createFromBinderContext(std::move(attr), provider);
+    }
+}
+
+#define VALUE_OR_RETURN_CONVERTED(exp)                                                \
+    ({                                                                                \
+        auto _tmp = (exp);                                                            \
+        if (!_tmp.ok()) {                                                             \
+            ALOGE("Function: %s Line: %d Failed result (%s)", __FUNCTION__, __LINE__, \
+                  errorToString(_tmp.error()).c_str());                               \
+            return statusTFromBinderStatus(_tmp.error());                             \
+        }                                                                             \
+        std::move(_tmp.value());                                                      \
+    })
+
 // Creates association between Binder code to name for IAudioFlinger.
 #define IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST \
 BINDER_METHOD_ENTRY(createTrack) \
@@ -519,30 +574,42 @@
     audio_attributes_t localAttr = *attr;
 
     // TODO b/182392553: refactor or make clearer
-    pid_t clientPid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
-    bool updatePid = (clientPid == (pid_t)-1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    AttributionSourceState adjAttributionSource;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        pid_t clientPid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+        bool updatePid = (clientPid == (pid_t)-1);
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
 
-    AttributionSourceState adjAttributionSource = client.attributionSource;
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        uid_t clientUid =
-            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
-        ALOGW_IF(clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        updatePid = true;
-    }
-    if (updatePid) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
+        adjAttributionSource = client.attributionSource;
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            uid_t clientUid =
+                VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+            ALOGW_IF(clientUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, clientUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            updatePid = true;
+        }
+        if (updatePid) {
+            const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, clientPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
             adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(client.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+    }
 
     if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
         audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
@@ -997,36 +1064,50 @@
     bool isSpatialized = false;
     bool isBitPerfect = false;
 
-    // TODO b/182392553: refactor or make clearer
-    pid_t clientPid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(input.clientInfo.attributionSource.pid));
-    bool updatePid = (clientPid == (pid_t)-1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    uid_t clientUid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(input.clientInfo.attributionSource.uid));
     audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
     std::vector<int> effectIds;
     audio_attributes_t localAttr = input.attr;
 
-    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        ALOGW_IF(clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        clientUid = callingUid;
-        updatePid = true;
+    AttributionSourceState adjAttributionSource;
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = input.clientInfo.attributionSource;
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        uid_t clientUid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(
+                        input.clientInfo.attributionSource.uid));
+        pid_t clientPid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+                        input.clientInfo.attributionSource.pid));
+        bool updatePid = (clientPid == (pid_t)-1);
+
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            ALOGW_IF(clientUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, clientUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            clientUid = callingUid;
+            updatePid = true;
+        }
+        if (updatePid) {
+            ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, clientPid);
+            clientPid = callingPid;
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                adjAttributionSource);
+
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(input.clientInfo.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
     }
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    if (updatePid) {
-        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        clientPid = callingPid;
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
-            adjAttributionSource);
 
     audio_session_t sessionId = input.sessionId;
     if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -1079,7 +1160,7 @@
             goto Exit;
         }
 
-        client = registerPid(clientPid);
+        client = registerPid(adjAttributionSource.pid);
 
         IAfPlaybackThread* effectThread = nullptr;
         sp<IAfEffectChain> effectChain = nullptr;
@@ -2199,6 +2280,12 @@
     }
 }
 
+const IPermissionProvider& AudioFlinger::getPermissionProvider() {
+    // This is initialized as part of service construction, prior to binder registration,
+    // so it should always be non-null.
+    return mAudioPolicyServiceLocal.load()->getPermissionProvider();
+}
+
 // removeClient_l() must be called with AudioFlinger::clientMutex() held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
@@ -2308,30 +2395,43 @@
     output.buffers.clear();
     output.inputId = AUDIO_IO_HANDLE_NONE;
 
-    // TODO b/182392553: refactor or clean up
-    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    bool updatePid = (adjAttributionSource.pid == -1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
-           adjAttributionSource.uid));
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        ALOGW_IF(currentUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, currentUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        updatePid = true;
+    AttributionSourceState adjAttributionSource;
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = input.clientInfo.attributionSource;
+        bool updatePid = (adjAttributionSource.pid == -1);
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
+               adjAttributionSource.uid));
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            ALOGW_IF(currentUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, currentUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            updatePid = true;
+        }
+        const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+                adjAttributionSource.pid));
+        if (updatePid) {
+            ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, currentPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(
+                    input.clientInfo.attributionSource,
+                    getPermissionProvider()
+                    ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
     }
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
-            adjAttributionSource.pid));
-    if (updatePid) {
-        ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, currentPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
-            adjAttributionSource);
+
     // further format checks are performed by createRecordTrack_l()
     if (!audio_is_valid_format(input.config.format)) {
         ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -3812,7 +3912,11 @@
 
 IAfPlaybackThread* AudioFlinger::primaryPlaybackThread_l() const
 {
-    audio_utils::lock_guard lock(hardwareMutex());
+    // The atomic ptr mPrimaryHardwareDev requires both the
+    // AudioFlinger and the Hardware mutex for modification.
+    // As we hold the AudioFlinger mutex, we access it
+    // safely without the Hardware mutex, to avoid mutex order
+    // inversion with Thread methods and the ThreadBase mutex.
     if (mPrimaryHardwareDev == nullptr) {
         return nullptr;
     }
@@ -3948,7 +4052,8 @@
                                                        patchRecord->bufferSize(),
                                                        outputFlags,
                                                        0ns /* timeout */,
-                                                       frameCountToBeReady);
+                                                       frameCountToBeReady,
+                                                       track->getSpeed());
         status = patchTrack->initCheck();
         if (status != NO_ERROR) {
             ALOGE("Secondary output patchTrack init failed: %d", status);
@@ -4116,20 +4221,31 @@
     int idOut = -1;
 
     status_t lStatus = NO_ERROR;
-
-    // TODO b/182392553: refactor or make clearer
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-    pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
-    if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(currentPid != -1 && currentPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, currentPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-        currentPid = callingPid;
+    uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    pid_t currentPid;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+        currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
+        if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW_IF(currentPid != -1 && currentPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, currentPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+            currentPid = callingPid;
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(request.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+        currentPid = adjAttributionSource.pid;
     }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+
 
     ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
           adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
@@ -4566,6 +4682,10 @@
             if ((pt->type() == IAfThreadBase::MIXER || pt->type() == IAfThreadBase::OFFLOAD) &&
                     ((sessionType & IAfThreadBase::EFFECT_SESSION) != 0)) {
                 srcThread = pt.get();
+                if (srcThread == dstThread) {
+                    ALOGD("%s() same dst and src threads, ignoring move", __func__);
+                    return NO_ERROR;
+                }
                 ALOGW("%s() found srcOutput %d hosting AUDIO_SESSION_OUTPUT_MIX", __func__,
                       pt->id());
                 break;
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index 501aed1..46f4068 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -407,6 +407,8 @@
     void onHardError(std::set<audio_port_handle_t>& trackPortIds) final
             EXCLUDES_AudioFlinger_ClientMutex;
 
+    const ::com::android::media::permission::IPermissionProvider& getPermissionProvider() final;
+
     // ---- end of IAfThreadCallback interface
 
     /* List available audio ports and their attributes */
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index ad043c8..c73b946 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -617,10 +617,11 @@
 
 }
 
+// Returns true if the effect has just started or stopped.
 bool EffectModule::updateState_l() {
     audio_utils::lock_guard _l(mutex());
 
-    bool started = false;
+    bool startedOrStopped = false;
     switch (mState) {
     case RESTART:
         reset_l();
@@ -635,7 +636,7 @@
         }
         if (start_ll() == NO_ERROR) {
             mState = ACTIVE;
-            started = true;
+            startedOrStopped = true;
         } else {
             mState = IDLE;
         }
@@ -655,6 +656,7 @@
         // turn off sequence.
         if (--mDisableWaitCnt == 0) {
             reset_l();
+            startedOrStopped = true;
             mState = IDLE;
         }
         break;
@@ -669,7 +671,7 @@
         break;
     }
 
-    return started;
+    return startedOrStopped;
 }
 
 void EffectModule::process()
@@ -1358,15 +1360,16 @@
     }
 }
 
-status_t EffectModule::setVolume(uint32_t* left, uint32_t* right, bool controller, bool force) {
+status_t EffectModule::setVolume_l(uint32_t* left, uint32_t* right, bool controller, bool force) {
     AutoLockReentrant _l(mutex(), mSetVolumeReentrantTid);
     if (mStatus != NO_ERROR) {
         return mStatus;
     }
     status_t status = NO_ERROR;
     // Send volume indication if EFFECT_FLAG_VOLUME_IND is set and read back altered volume
-    // if controller flag is set (Note that controller == TRUE => EFFECT_FLAG_VOLUME_CTRL set)
-    if ((isProcessEnabled() || force) &&
+    // if controller flag is set (note that controller == TRUE means this effect is the volume
+    // controller of the effect chain)
+    if (((isOffloadedOrDirect_l() ? isEnabled() : isProcessEnabled()) || force) &&
             ((mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_CTRL ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_IND ||
              (mDescriptor.flags & EFFECT_FLAG_VOLUME_MASK) == EFFECT_FLAG_VOLUME_MONITOR)) {
@@ -1377,7 +1380,8 @@
 
 status_t EffectModule::setVolumeInternal(
         uint32_t *left, uint32_t *right, bool controller) {
-    if (mVolume.has_value() && *left == mVolume.value()[0] && *right == mVolume.value()[1]) {
+    if (mVolume.has_value() && *left == mVolume.value()[0] && *right == mVolume.value()[1] &&
+            !controller) {
         LOG_ALWAYS_FATAL_IF(
                 !mReturnedVolume.has_value(),
                 "The cached returned volume must not be null when the cached volume has value");
@@ -1387,14 +1391,14 @@
     }
     LOG_ALWAYS_FATAL_IF(mEffectInterface == nullptr, "%s", mEffectInterfaceDebug.c_str());
     uint32_t volume[2] = {*left, *right};
-    uint32_t *pVolume = controller ? volume : nullptr;
+    uint32_t* pVolume = isVolumeControl() ? volume : nullptr;
     uint32_t size = sizeof(volume);
     status_t status = mEffectInterface->command(EFFECT_CMD_SET_VOLUME,
                                                 size,
                                                 volume,
                                                 &size,
                                                 pVolume);
-    if (controller && status == NO_ERROR && size == sizeof(volume)) {
+    if (pVolume && status == NO_ERROR && size == sizeof(volume)) {
         mVolume = {*left, *right}; // Cache the value that has been set
         *left = volume[0];
         *right = volume[1];
@@ -2308,6 +2312,9 @@
     }
     bool doResetVolume = false;
     for (size_t i = 0; i < size; i++) {
+        // Reset the volume when any effect has just started or stopped.
+        // resetVolume_l checks whether the chain's volume controller effect needs an update and
+        // applies the correct volume.
         doResetVolume = mEffects[i]->updateState_l() || doResetVolume;
     }
     if (doResetVolume) {
@@ -2425,7 +2432,7 @@
             // volume will be set from setVolume_l.
             uint32_t left = 0;
             uint32_t right = 0;
-            effect->setVolume(&left, &right, true /*controller*/, true /*force*/);
+            effect->setVolume_l(&left, &right, true /*controller*/, true /*force*/);
         }
     }
 
@@ -2623,6 +2630,7 @@
 
     // first update volume controller
     const auto volumeControlIndex = findVolumeControl_l(0, size);
+    // index of the effect chain volume controller
     const int ctrlIdx = volumeControlIndex.value_or(-1);
     const sp<IAfEffectModule> volumeControlEffect =
             volumeControlIndex.has_value() ? mEffects[ctrlIdx] : nullptr;
@@ -2639,12 +2647,15 @@
     mVolumeControlEffect = volumeControlEffect;
 
     for (int i = 0; i < ctrlIdx; ++i) {
-        // For all volume control effects before the effect that controls volume, set the volume
+        // Effects placed before the effect that controls volume do not control the effect chain
+        // volume; if such an effect has the volume control capability, set its volume
         // to maximum to avoid double attenuation.
         if (mEffects[i]->isVolumeControl()) {
             uint32_t leftMax = 1 << 24;
             uint32_t rightMax = 1 << 24;
-            mEffects[i]->setVolume(&leftMax, &rightMax, true /*controller*/, true /*force*/);
+            mEffects[i]->setVolume_l(&leftMax, &rightMax,
+                                     false /* not an effect chain volume controller */,
+                                     true /* force */);
         }
     }
 
@@ -2653,9 +2664,13 @@
 
     // second get volume update from volume controller
     if (ctrlIdx >= 0) {
-        mEffects[ctrlIdx]->setVolume(&newLeft, &newRight, true);
+        mEffects[ctrlIdx]->setVolume_l(&newLeft, &newRight,
+                                       true /* effect chain volume controller */);
         mNewLeftVolume = newLeft;
         mNewRightVolume = newRight;
+        ALOGD("%s sessionId %d volume controller effect %s set (%d, %d), ret (%d, %d)", __func__,
+              mSessionId, mEffects[ctrlIdx]->desc().name, mLeftVolume, mRightVolume, newLeft,
+              newRight);
     }
     // then indicate volume to all other effects in chain.
     // Pass altered volume to effects before volume controller
@@ -2674,9 +2689,11 @@
         }
         // Pass requested volume directly if this is volume monitor module
         if (mEffects[i]->isVolumeMonitor()) {
-            mEffects[i]->setVolume(left, right, false);
+            mEffects[i]->setVolume_l(left, right,
+                                     false /* not an effect chain volume controller */);
         } else {
-            mEffects[i]->setVolume(&lVol, &rVol, false);
+            mEffects[i]->setVolume_l(&lVol, &rVol,
+                                     false /* not an effect chain volume controller */);
         }
     }
     *left = newLeft;
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index b516c37..d107543 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -217,7 +217,8 @@
     }
     status_t setDevices(const AudioDeviceTypeAddrVector& devices) final EXCLUDES_EffectBase_Mutex;
     status_t setInputDevice(const AudioDeviceTypeAddr& device) final EXCLUDES_EffectBase_Mutex;
-    status_t setVolume(uint32_t *left, uint32_t *right, bool controller, bool force) final;
+    status_t setVolume_l(uint32_t* left, uint32_t* right, bool controller, bool force) final
+            REQUIRES(audio_utils::EffectChain_Mutex);
     status_t setMode(audio_mode_t mode) final EXCLUDES_EffectBase_Mutex;
     status_t setAudioSource(audio_source_t source) final EXCLUDES_EffectBase_Mutex;
     status_t start_l() final REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex;
@@ -265,8 +266,8 @@
                 ? EFFECT_BUFFER_ACCESS_WRITE : EFFECT_BUFFER_ACCESS_ACCUMULATE;
     }
 
-    status_t setVolumeInternal(uint32_t *left, uint32_t *right, bool controller);
-
+    status_t setVolumeInternal(uint32_t* left, uint32_t* right,
+                               bool controller /* the volume controller effect of the chain */);
 
     effect_config_t     mConfig;    // input and output audio configuration
     sp<EffectHalInterface> mEffectInterface; // Effect module HAL
diff --git a/services/audioflinger/IAfEffect.h b/services/audioflinger/IAfEffect.h
index b9bb18c..bb82afb 100644
--- a/services/audioflinger/IAfEffect.h
+++ b/services/audioflinger/IAfEffect.h
@@ -163,8 +163,9 @@
     virtual int16_t *inBuffer() const = 0;
     virtual status_t setDevices(const AudioDeviceTypeAddrVector &devices) = 0;
     virtual status_t setInputDevice(const AudioDeviceTypeAddr &device) = 0;
-    virtual status_t setVolume(uint32_t *left, uint32_t *right, bool controller,
-                               bool force = false) = 0;
+    virtual status_t setVolume_l(uint32_t* left, uint32_t* right,
+                                 bool controller /* effect controlling chain volume */,
+                                 bool force = false) REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual status_t setOffloaded_l(bool offloaded, audio_io_handle_t io) = 0;
     virtual bool isOffloaded_l() const = 0;
 
@@ -188,11 +189,13 @@
     virtual status_t sendMetadata_ll(const std::vector<playback_track_metadata_v7_t>& metadata)
             REQUIRES(audio_utils::ThreadBase_Mutex,
                      audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
+    // Returns true if there was a state change from STARTING to ACTIVE, or from STOPPED to IDLE;
+    // in these two cases the effect chain will perform a volume reset.
+    virtual bool updateState_l()
+            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
 
 private:
     virtual void process() = 0;
-    virtual bool updateState_l()
-            REQUIRES(audio_utils::EffectChain_Mutex) EXCLUDES_EffectBase_Mutex = 0;
     virtual void reset_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual status_t configure_l() REQUIRES(audio_utils::EffectChain_Mutex) = 0;
     virtual status_t init_l()
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index 4518d48..4d26aa0 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -19,8 +19,9 @@
 #include <android/media/IAudioTrackCallback.h>
 #include <android/media/IEffectClient.h>
 #include <audiomanager/IAudioManager.h>
-#include <audio_utils/mutex.h>
+#include <audio_utils/DeferredExecutor.h>
 #include <audio_utils/MelProcessor.h>
+#include <audio_utils/mutex.h>
 #include <binder/MemoryDealer.h>
 #include <datapath/AudioStreamIn.h>
 #include <datapath/AudioStreamOut.h>
@@ -36,6 +37,10 @@
 
 #include <optional>
 
+namespace com::android::media::permission {
+    class IPermissionProvider;
+}
+
 namespace android {
 
 class IAfDirectOutputThread;
@@ -121,6 +126,9 @@
             EXCLUDES_AudioFlinger_ClientMutex = 0;
 
     virtual void onHardError(std::set<audio_port_handle_t>& trackPortIds) = 0;
+
+    virtual const ::com::android::media::permission::IPermissionProvider&
+            getPermissionProvider() = 0;
 };
 
 class IAfThreadBase : public virtual RefBase {
@@ -395,6 +403,12 @@
     // avoid races.
     virtual void waitWhileThreadBusy_l(audio_utils::unique_lock& ul)
             REQUIRES(mutex()) = 0;
+
+    // The ThreadloopExecutor is used to defer functors or dtors
+    // until the threadLoop does not hold any mutexes (at the end of the
+    // processing period cycle).
+    virtual audio_utils::DeferredExecutor& getThreadloopExecutor() = 0;
+
     // Dynamic cast to derived interface
     virtual sp<IAfDirectOutputThread> asIAfDirectOutputThread() { return nullptr; }
     virtual sp<IAfDuplicatingThread> asIAfDuplicatingThread() { return nullptr; }
diff --git a/services/audioflinger/IAfTrack.h b/services/audioflinger/IAfTrack.h
index 168bcca..a9c87ad 100644
--- a/services/audioflinger/IAfTrack.h
+++ b/services/audioflinger/IAfTrack.h
@@ -579,10 +579,11 @@
             size_t bufferSize,
             audio_output_flags_t flags,
             const Timeout& timeout = {},
-            size_t frameCountToBeReady = 1 /** Default behaviour is to start
+            size_t frameCountToBeReady = 1, /** Default behaviour is to start
                                              *  as soon as possible to have
                                              *  the lowest possible latency
-                                             *  even if it might glitch. */);
+                                             *  even if it might glitch. */
+            float speed = 1.0f);
 };
 
 class IAfPatchRecord : public virtual IAfRecordTrack, public virtual IAfPatchTrackBase {
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index 4333cc8..f57470f 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -646,7 +646,8 @@
                                            tempRecordTrack->bufferSize(),
                                            outputFlags,
                                            {} /*timeout*/,
-                                           frameCountToBeReady);
+                                           frameCountToBeReady,
+                                           1.0f);
     status = mPlayback.checkTrack(tempPatchTrack.get());
     if (status != NO_ERROR) {
         return status;
diff --git a/services/audioflinger/PlaybackTracks.h b/services/audioflinger/PlaybackTracks.h
index 1e1b6d2..2cc6236 100644
--- a/services/audioflinger/PlaybackTracks.h
+++ b/services/audioflinger/PlaybackTracks.h
@@ -455,7 +455,7 @@
     void                queueBuffer(Buffer& inBuffer);
     void                clearBufferQueue();
 
-    void                restartIfDisabled();
+    void restartIfDisabled() override;
 
     // Maximum number of pending buffers allocated by OutputTrack::write()
     static const uint8_t kMaxOverFlowBuffers = 10;
@@ -497,10 +497,11 @@
                                    size_t bufferSize,
                                    audio_output_flags_t flags,
                                    const Timeout& timeout = {},
-                                   size_t frameCountToBeReady = 1 /** Default behaviour is to start
+                                   size_t frameCountToBeReady = 1, /** Default behaviour is to start
                                                                     *  as soon as possible to have
                                                                     *  the lowest possible latency
-                                                                    *  even if it might glitch. */);
+                                                                    *  even if it might glitch. */
+                                   float speed = 1.0f);
     ~PatchTrack() override;
 
     size_t framesReady() const final;
@@ -518,7 +519,7 @@
     void releaseBuffer(Proxy::Buffer* buffer) final;
 
 private:
-            void restartIfDisabled();
+    void restartIfDisabled() override;
 };  // end of PatchTrack
 
 } // namespace android
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 66e89e4..2dcbbce 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -72,6 +72,7 @@
 #include <media/nbaio/Pipe.h>
 #include <media/nbaio/PipeReader.h>
 #include <media/nbaio/SourceAudioBufferProvider.h>
+#include <media/ValidatedAttributionSourceState.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/Process.h>
 #include <mediautils/SchedulingPolicyService.h>
@@ -120,6 +121,8 @@
     return a < b ? a : b;
 }
 
+using com::android::media::permission::ValidatedAttributionSourceState;
+
 namespace android {
 
 using audioflinger::SyncEvent;
@@ -4646,7 +4649,11 @@
 
         // FIXME Note that the above .clear() is no longer necessary since effectChains
         // is now local to this block, but will keep it for now (at least until merge done).
+
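+        // Execute any functors or dtors deferred during this processing period
+        // (see mThreadloopExecutor) while no mutexes are held.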
+        mThreadloopExecutor.process();
     }
+    mThreadloopExecutor.process(); // process any remaining deferred actions.
+    // deferred actions after this point are ignored.
 
     threadLoop_exit();
 
@@ -5089,7 +5096,6 @@
         // mPipeSink below
         // mNormalSink below
 {
-    setMasterBalance(afThreadCallback->getMasterBalance_l());
     ALOGV("MixerThread() id=%d type=%d", id, type);
     ALOGV("mSampleRate=%u, mChannelMask=%#x, mChannelCount=%u, mFormat=%#x, mFrameSize=%zu, "
             "mFrameCount=%zu, mNormalFrameCount=%zu",
@@ -5101,6 +5107,8 @@
         // The Duplicating thread uses the AudioMixer and delivers data to OutputTracks
         // (downstream MixerThreads) in DuplicatingThread::threadLoop_write().
         // Do not create or use mFastMixer, mOutputSink, mPipeSink, or mNormalSink.
+        // Balance is *not* set in the DuplicatingThread here (or from AudioFlinger),
+        // as the downstream MixerThreads implement it.
         return;
     }
     // create an NBAIO sink for the HAL output stream, and negotiate
@@ -5260,6 +5268,9 @@
         mNormalSink = initFastMixer ? mPipeSink : mOutputSink;
         break;
     }
+    // setMasterBalance needs to be called after the FastMixer
+    // (if any) is set up, in order to deliver the balance settings to it.
+    setMasterBalance(afThreadCallback->getMasterBalance_l());
 }
 
 MixerThread::~MixerThread()
@@ -7185,14 +7196,11 @@
 {
     PlaybackThread::flushHw_l();
     mOutput->flush();
+    mHwPaused = false;
     mFlushPending = false;
     mTimestampVerifier.discontinuity(discontinuityForStandbyOrFlush());
     mTimestamp.clear();
     mMonotonicFrameCounter.onFlush();
-    // We do not reset mHwPaused which is hidden from the Track client.
-    // Note: the client track in Tracks.cpp and AudioTrack.cpp
-    // has a FLUSHED state but the DirectOutputThread does not;
-    // those tracks will continue to show isStopped().
 }
 
 int64_t DirectOutputThread::computeWaitTimeNs_l() const {
@@ -8670,6 +8678,9 @@
 
         // loop over each active track
         for (size_t i = 0; i < size; i++) {
+            if (activeTrack) {  // ensure track release is outside lock.
+                oldActiveTracks.emplace_back(std::move(activeTrack));
+            }
             activeTrack = activeTracks[i];
 
             // skip fast tracks, as those are handled directly by FastCapture
@@ -8813,11 +8824,14 @@
             mIoJitterMs.add(jitterMs);
             mProcessTimeMs.add(processMs);
         }
+        mThreadloopExecutor.process();
         // update timing info.
         mLastIoBeginNs = lastIoBeginNs;
         mLastIoEndNs = lastIoEndNs;
         lastLoopCountRead = loopCount;
     }
+    mThreadloopExecutor.process(); // process any remaining deferred actions.
+    // deferred actions after this point are ignored.
 
     standbyIfNotAlreadyInStandby();
 
@@ -10291,8 +10305,23 @@
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
 
     audio_io_handle_t io = mId;
-    const AttributionSourceState adjAttributionSource = afutils::checkAttributionSourcePackage(
-            client.attributionSource);
+    AttributionSourceState adjAttributionSource;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                client.attributionSource);
+    } else {
+        // TODO(b/342475009) validate in oboeservice, and plumb downwards
+        auto validatedRes = ValidatedAttributionSourceState::createFromTrustedUidNoPackage(
+                    client.attributionSource,
+                    mAfThreadCallback->getPermissionProvider()
+                );
+        if (!validatedRes.has_value()) {
+            ALOGE("MMAP client package validation fail: %s",
+                    validatedRes.error().toString8().c_str());
+            return aidl_utils::statusTFromBinderStatus(validatedRes.error());
+        }
+        adjAttributionSource = std::move(validatedRes.value()).unwrapInto();
+    }
 
     const auto localSessionId = mSessionId;
     auto localAttr = mAttr;
@@ -10603,7 +10632,10 @@
         unlockEffectChains(effectChains);
         // Effect chains will be actually deleted here if they were removed from
         // mEffectChains list during mixing or effects processing
+        mThreadloopExecutor.process();
     }
+    mThreadloopExecutor.process(); // process any remaining deferred actions.
+    // deferred actions after this point are ignored.
 
     threadLoop_exit();
 
diff --git a/services/audioflinger/Threads.h b/services/audioflinger/Threads.h
index 4b2ade4..654b841 100644
--- a/services/audioflinger/Threads.h
+++ b/services/audioflinger/Threads.h
@@ -571,6 +571,10 @@
     void stopMelComputation_l() override
             REQUIRES(audio_utils::AudioFlinger_Mutex);
 
+    audio_utils::DeferredExecutor& getThreadloopExecutor() override {
+        return mThreadloopExecutor;
+    }
+
 protected:
 
                 // entry describing an effect being suspended in mSuspendedSessions keyed vector
@@ -877,6 +881,14 @@
 
                 SimpleLog mLocalLog;  // locked internally
 
+    // mThreadloopExecutor contains deferred functors and objects (dtors) to
+    // be executed at the end of the processing period, without any
+    // mutexes held.
+    //
+    // mThreadloopExecutor is locked internally, so its methods are thread-safe
+    // for access.
+    audio_utils::DeferredExecutor mThreadloopExecutor;
+
     private:
     void dumpBase_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
     void dumpEffectChains_l(int fd, const Vector<String16>& args) REQUIRES(mutex());
diff --git a/services/audioflinger/TrackBase.h b/services/audioflinger/TrackBase.h
index 5708c61..a0b85f7 100644
--- a/services/audioflinger/TrackBase.h
+++ b/services/audioflinger/TrackBase.h
@@ -333,6 +333,9 @@
                                     // true for Track, false for RecordTrack,
                                     // this could be a track type if needed later
 
+    void deferRestartIfDisabled();
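+    // restartIfDisabled() is a no-op by default; OutputTrack and PatchTrack override it, and it
+    // is invoked through the owning thread's ThreadloopExecutor by deferRestartIfDisabled().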
+    virtual void restartIfDisabled() {}
+
     const wp<IAfThreadBase> mThread;
     const alloc_type     mAllocType;
     /*const*/ sp<Client> mClient;   // see explanation at ~TrackBase() why not const
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index f8cfc12..f5f11cc 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -314,6 +314,17 @@
     return NO_ERROR;
 }
 
+void TrackBase::deferRestartIfDisabled()
+{
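+    // Defer restartIfDisabled() to the thread's ThreadloopExecutor so it runs at the end of the
+    // processing period, when the thread loop holds no mutexes.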
+    const auto thread = mThread.promote();
+    if (thread == nullptr) return;
+    thread->getThreadloopExecutor().defer(
+            [track = wp<TrackBase>::fromExisting(this)] {
+            const auto actual = track.promote();
+            if (actual) actual->restartIfDisabled();
+        });
+}
+
 PatchTrackBase::PatchTrackBase(const sp<ClientProxy>& proxy,
         IAfThreadBase* thread, const Timeout& timeout)
     : mProxy(proxy)
@@ -2310,7 +2321,7 @@
                 waitTimeLeftMs = 0;
             }
             if (status == NOT_ENOUGH_DATA) {
-                restartIfDisabled();
+                deferRestartIfDisabled();
                 continue;
             }
         }
@@ -2322,7 +2333,7 @@
         buf.mFrameCount = outFrames;
         buf.mRaw = NULL;
         mClientProxy->releaseBuffer(&buf);
-        restartIfDisabled();
+        deferRestartIfDisabled();
         pInBuffer->frameCount -= outFrames;
         pInBuffer->raw = (int8_t *)pInBuffer->raw + outFrames * mFrameSize;
         mOutBuffer.frameCount -= outFrames;
@@ -2449,10 +2460,11 @@
         size_t bufferSize,
         audio_output_flags_t flags,
         const Timeout& timeout,
-        size_t frameCountToBeReady /** Default behaviour is to start
+        size_t frameCountToBeReady, /** Default behaviour is to start
                                          *  as soon as possible to have
                                          *  the lowest possible latency
-                                         *  even if it might glitch. */)
+                                         *  even if it might glitch. */
+        float speed)
 {
     return sp<PatchTrack>::make(
             playbackThread,
@@ -2465,7 +2477,8 @@
             bufferSize,
             flags,
             timeout,
-            frameCountToBeReady);
+            frameCountToBeReady,
+            speed);
 }
 
 PatchTrack::PatchTrack(IAfPlaybackThread* playbackThread,
@@ -2478,17 +2491,26 @@
                                                      size_t bufferSize,
                                                      audio_output_flags_t flags,
                                                      const Timeout& timeout,
-                                                     size_t frameCountToBeReady)
+                                                     size_t frameCountToBeReady,
+                                                     float speed)
     :   Track(playbackThread, NULL, streamType,
               audio_attributes_t{} /* currently unused for patch track */,
               sampleRate, format, channelMask, frameCount,
               buffer, bufferSize, nullptr /* sharedBuffer */,
               AUDIO_SESSION_NONE, getpid(), audioServerAttributionSource(getpid()), flags,
-              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady),
-        PatchTrackBase(mCblk ? new ClientProxy(mCblk, mBuffer, frameCount, mFrameSize, true, true)
-                        : nullptr,
+              TYPE_PATCH, AUDIO_PORT_HANDLE_NONE, frameCountToBeReady, speed),
+        PatchTrackBase(mCblk ? new AudioTrackClientProxy(mCblk, mBuffer, frameCount, mFrameSize,
+                        true /*clientInServer*/) : nullptr,
                        playbackThread, timeout)
 {
+    if (mProxy != nullptr) {
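+        // Propagate the requested playback speed to the client proxy; pitch is left normal.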
+        sp<AudioTrackClientProxy>::cast(mProxy)->setPlaybackRate({
+                /* .mSpeed = */ speed,
+                /* .mPitch = */ AUDIO_TIMESTRETCH_PITCH_NORMAL,
+                /* .mStretchMode = */ AUDIO_TIMESTRETCH_STRETCH_DEFAULT,
+                /* .mFallbackMode = */ AUDIO_TIMESTRETCH_FALLBACK_FAIL
+        });
+    }
     ALOGV("%s(%d): sampleRate %d mPeerTimeout %d.%03d sec",
                                       __func__, mId, sampleRate,
                                       (int)mPeerTimeout.tv_sec,
@@ -2566,7 +2588,7 @@
     const size_t originalFrameCount = buffer->mFrameCount;
     do {
         if (status == NOT_ENOUGH_DATA) {
-            restartIfDisabled();
+            deferRestartIfDisabled();
             buffer->mFrameCount = originalFrameCount; // cleared on error, must be restored.
         }
         status = mProxy->obtainBuffer(buffer, timeOut);
@@ -2577,7 +2599,7 @@
 void PatchTrack::releaseBuffer(Proxy::Buffer* buffer)
 {
     mProxy->releaseBuffer(buffer);
-    restartIfDisabled();
+    deferRestartIfDisabled();
 
     // Check if the PatchTrack has enough data to write once in releaseBuffer().
     // If not, prevent an underrun from occurring by moving the track into FS_FILLING;
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index e91e2a3..deb7345 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -294,8 +294,7 @@
     virtual status_t startAudioSource(const struct audio_port_config *source,
                                       const audio_attributes_t *attributes,
                                       audio_port_handle_t *portId,
-                                      uid_t uid,
-                                      bool internal = false) = 0;
+                                      uid_t uid) = 0;
     virtual status_t stopAudioSource(audio_port_handle_t portId) = 0;
 
     virtual status_t setMasterMono(bool mono) = 0;
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
index 6167f95..e519766 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioIODescriptorInterface.h
@@ -47,13 +47,17 @@
 
     if (active) {
         // On MMAP IOs, the preferred device is selected by the first client (virtual client
-        // created when the mmap stream is opened). This client is never active.
+        // created when the mmap stream is opened). This client is never active and we only
+        // consider the Filter criteria, not the active state.
         // On non MMAP IOs, the preferred device is honored only if all active clients have
         // a preferred device in which case the first client drives the selection.
         if (desc->isMmap()) {
-            // The client list is never empty on a MMAP IO
-            return devices.getDeviceFromId(
-                    desc->clientsList(false /*activeOnly*/)[0]->preferredDeviceId());
+            auto matchingClients = desc->clientsList(
+                    false /*activeOnly*/, filter, false /*preferredDevice*/);
+            if (matchingClients.empty()) {
+                return nullptr;
+            }
+            return devices.getDeviceFromId(matchingClients[0]->preferredDeviceId());
         } else {
             auto activeClientsWithRoute =
                 desc->clientsList(true /*activeOnly*/, filter, true /*preferredDevice*/);
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index c26ea10..0f2fe24 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -41,7 +41,8 @@
 {
 public:
     AudioInputDescriptor(const sp<IOProfile>& profile,
-                         AudioPolicyClientInterface *clientInterface);
+                         AudioPolicyClientInterface *clientInterface,
+                         bool isPreemptor);
 
     virtual ~AudioInputDescriptor() = default;
 
@@ -127,6 +128,8 @@
     // active use case
     void checkSuspendEffects();
 
+    bool isPreemptor() const { return mIsPreemptor; }
+
  private:
 
     void updateClientRecordingConfiguration(int event, const sp<RecordClientDescriptor>& client);
@@ -145,6 +148,7 @@
     int32_t mGlobalActiveCount = 0;  // non-client-specific activity ref count
     EffectDescriptorCollection mEnabledEffects;
     audio_input_flags_t& mFlags = AudioPortConfig::mFlags.input;
+    bool mIsPreemptor; // true if this input was opened after preempting another one
 };
 
 class AudioInputCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
index a596c43..60da405 100644
--- a/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/ClientDescriptor.h
@@ -63,6 +63,8 @@
      * HW Audio Source.
      */
     virtual bool isInternal() const { return false; }
+    virtual bool isCallRx() const { return false; }
+    virtual bool isCallTx() const { return false; }
     audio_port_handle_t portId() const { return mPortId; }
     uid_t uid() const { return mUid; }
     audio_session_t session() const { return mSessionId; };
@@ -236,7 +238,7 @@
                            const sp<DeviceDescriptor>& srcDevice,
                            audio_stream_type_t stream, product_strategy_t strategy,
                            VolumeSource volumeSource,
-                           bool isInternal);
+                           bool isInternal, bool isCallRx, bool isCallTx);
 
     ~SourceClientDescriptor() override = default;
 
@@ -263,6 +265,8 @@
     wp<HwAudioOutputDescriptor> hwOutput() const { return mHwOutput; }
     void setHwOutput(const sp<HwAudioOutputDescriptor>& hwOutput);
     bool isInternal() const override { return mIsInternal; }
+    bool isCallRx() const override { return mIsCallRx; }
+    bool isCallTx() const override { return mIsCallTx; }
 
     using ClientDescriptor::dump;
     void dump(String8 *dst, int spaces) const override;
@@ -294,6 +298,8 @@
      * requester to prevent rerouting SwOutput involved in raw patches.
      */
     bool mIsInternal = false;
+    bool mIsCallRx = false;
+    bool mIsCallTx = false;
 };
 
 class SourceClientCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
index c502fc2..7002e63 100644
--- a/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/DeviceDescriptor.h
@@ -282,6 +282,11 @@
 
     const AudioProfileVector& getSupportedProfiles() { return mSupportedProfiles; }
 
+    /**
+     * @brief Checks whether all devices in the device vector are attached to the HwModule
+     * @return true if all devices in the device vector are attached, false otherwise
+     */
+    bool areAllDevicesAttached() const;
     // Return a string to describe the DeviceVector. The sensitive information will only be
     // added to the string if `includeSensitiveInfo` is true.
     std::string toString(bool includeSensitiveInfo = false) const;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 44f84b9..5a0fd97 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -30,9 +30,10 @@
 namespace android {
 
 AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile,
-                                           AudioPolicyClientInterface *clientInterface)
+                                           AudioPolicyClientInterface *clientInterface,
+                                           bool isPreemptor)
     : mProfile(profile)
-    ,  mClientInterface(clientInterface)
+    ,  mClientInterface(clientInterface), mIsPreemptor(isPreemptor)
 {
     if (profile != NULL) {
         profile->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
@@ -275,6 +276,9 @@
                             "%s invalid profile active count %u",
                             __func__, mProfile->curActiveCount);
         mProfile->curActiveCount--;
+        // allow preemption again now that at least one client was able to
+        // capture on this input
+        mIsPreemptor = false;
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
index 667c189..ad6977b 100644
--- a/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/ClientDescriptor.cpp
@@ -96,12 +96,14 @@
 SourceClientDescriptor::SourceClientDescriptor(audio_port_handle_t portId, uid_t uid,
          audio_attributes_t attributes, const struct audio_port_config &config,
          const sp<DeviceDescriptor>& srcDevice, audio_stream_type_t stream,
-         product_strategy_t strategy, VolumeSource volumeSource, bool isInternal) :
+         product_strategy_t strategy, VolumeSource volumeSource,
+         bool isInternal, bool isCallRx, bool isCallTx) :
     TrackClientDescriptor::TrackClientDescriptor(portId, uid, AUDIO_SESSION_NONE, attributes,
         {config.sample_rate, config.channel_mask, config.format}, AUDIO_PORT_HANDLE_NONE,
         stream, strategy, volumeSource, AUDIO_OUTPUT_FLAG_NONE, false,
         {} /* Sources do not support secondary outputs*/, nullptr),
-    mSrcDevice(srcDevice), mIsInternal(isInternal)
+    mSrcDevice(srcDevice), mIsInternal(isInternal),
+    mIsCallRx(isCallRx), mIsCallTx(isCallTx)
 {
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
index 9f7b8fc..46a04de 100644
--- a/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/DeviceDescriptor.cpp
@@ -541,4 +541,14 @@
     return filteredDevices;
 }
 
+bool DeviceVector::areAllDevicesAttached() const
+{
+    for (const auto &device : *this) {
+        if (!device->isAttached()) {
+            return false;
+        }
+    }
+    return true;
+}
+
 } // namespace android
diff --git a/services/audiopolicy/enginedefault/Android.bp b/services/audiopolicy/enginedefault/Android.bp
index 799b8d9..aec8c16 100644
--- a/services/audiopolicy/enginedefault/Android.bp
+++ b/services/audiopolicy/enginedefault/Android.bp
@@ -30,6 +30,7 @@
         "libaudiopolicyengine_config",
     ],
     shared_libs: [
+        "com.android.media.audioserver-aconfig-cc",
         "libaudio_aidl_conversion_common_cpp",
         "libaudiofoundation",
         "libaudiopolicy",
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 13cc165..7666a2b 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -30,6 +30,7 @@
 #include <PolicyAudioPort.h>
 #include <IOProfile.h>
 #include <AudioIODescriptorInterface.h>
+#include <com_android_media_audioserver.h>
 #include <policy.h>
 #include <media/AudioContainers.h>
 #include <utils/String8.h>
@@ -154,12 +155,58 @@
     return EngineBase::setForceUse(usage, config);
 }
 
+bool Engine::isBtScoActive(DeviceVector& availableOutputDevices,
+                           const SwAudioOutputCollection &outputs) const {
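+    // SCO cannot be in use if no SCO output device is currently available.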
+    if (availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
+        return false;
+    }
+    // SCO is active if:
+    // 1) we are in a call and SCO is the preferred device for PHONE strategy
+    if (isInCall() && audio_is_bluetooth_out_sco_device(
+            getPreferredDeviceTypeForLegacyStrategy(availableOutputDevices, STRATEGY_PHONE))) {
+        return true;
+    }
+
+    // 2) A strategy for which the preferred device is SCO is active
+    for (const auto &ps : getOrderedProductStrategies()) {
+        if (outputs.isStrategyActive(ps) &&
+            !getPreferredAvailableDevicesForProductStrategy(availableOutputDevices, ps)
+                .getDevicesFromTypes(getAudioDeviceOutAllScoSet()).isEmpty()) {
+            return true;
+        }
+    }
+    // 3) a ringtone is active and SCO is used for ringing
+    if (outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_RING))
+          && (getForceUse(AUDIO_POLICY_FORCE_FOR_VIBRATE_RINGING)
+                    == AUDIO_POLICY_FORCE_BT_SCO)) {
+        return true;
+    }
+    // 4) an active input is routed from SCO
+    DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
+    const auto &inputs = getApmObserver()->getInputs();
+    if (inputs.activeInputsCountOnDevices(availableInputDevices.getDevicesFromType(
+            AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET)) > 0) {
+        return true;
+    }
+    return false;
+}
+
 void Engine::filterOutputDevicesForStrategy(legacy_strategy strategy,
                                             DeviceVector& availableOutputDevices,
                                             const SwAudioOutputCollection &outputs) const
 {
     DeviceVector availableInputDevices = getApmObserver()->getAvailableInputDevices();
 
+    if (com::android::media::audioserver::use_bt_sco_for_media()) {
+        // remove A2DP and LE Audio devices whenever BT SCO is in use
+        if (isBtScoActive(availableOutputDevices, outputs)) {
+            availableOutputDevices.remove(
+                availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllA2dpSet()));
+            availableOutputDevices.remove(
+                availableOutputDevices.getDevicesFromTypes(getAudioDeviceOutAllBleSet()));
+        }
+    }
+
     switch (strategy) {
     case STRATEGY_SONIFICATION_RESPECTFUL: {
         if (!(isInCall() || outputs.isActiveLocally(toVolumeSource(AUDIO_STREAM_VOICE_CALL)))) {
@@ -381,10 +428,12 @@
 
         // LE audio broadcast device is only used if:
         // - No call is active
-        // - either MEDIA or SONIFICATION_RESPECTFUL is the highest priority active strategy
-        //   OR the LE audio unicast device is not active
+        // - either MEDIA, SONIFICATION_RESPECTFUL or SONIFICATION is the highest priority
+        //   active strategy
+        //   OR the LE audio unicast device is not active
         if (devices2.isEmpty() && !isInCall()
-                && (strategy == STRATEGY_MEDIA || strategy == STRATEGY_SONIFICATION_RESPECTFUL)) {
+                && (strategy == STRATEGY_MEDIA || strategy == STRATEGY_SONIFICATION_RESPECTFUL
+                      || strategy == STRATEGY_SONIFICATION)) {
             legacy_strategy topActiveStrategy = STRATEGY_NONE;
             for (const auto &ps : getOrderedProductStrategies()) {
                 if (outputs.isStrategyActive(ps)) {
@@ -396,6 +445,7 @@
 
             if (topActiveStrategy == STRATEGY_NONE || topActiveStrategy == STRATEGY_MEDIA
                     || topActiveStrategy == STRATEGY_SONIFICATION_RESPECTFUL
+                    || topActiveStrategy == STRATEGY_SONIFICATION
                     || !outputs.isAnyDeviceTypeActive(getAudioDeviceOutLeAudioUnicastSet())) {
                 devices2 =
                         availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_BLE_BROADCAST);
@@ -418,15 +468,27 @@
                         getLastRemovableMediaDevices(GROUP_WIRED, excludedDevices));
             }
         }
+
+        if (com::android::media::audioserver::use_bt_sco_for_media()) {
+            if (devices2.isEmpty() && isBtScoActive(availableOutputDevices, outputs)) {
+                devices2 = availableOutputDevices.getFirstDevicesFromTypes(
+                        { AUDIO_DEVICE_OUT_BLUETOOTH_SCO_CARKIT,
+                          AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET,
+                          AUDIO_DEVICE_OUT_BLUETOOTH_SCO});
+            }
+        }
+
         if ((devices2.isEmpty()) &&
                 (getForceUse(AUDIO_POLICY_FORCE_FOR_DOCK) == AUDIO_POLICY_FORCE_ANALOG_DOCK)) {
             devices2 = availableOutputDevices.getDevicesFromType(
                     AUDIO_DEVICE_OUT_ANLG_DOCK_HEADSET);
         }
+
         if (devices2.isEmpty()) {
             devices2 = availableOutputDevices.getFirstDevicesFromTypes({
                         AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET, AUDIO_DEVICE_OUT_SPEAKER});
         }
+
         DeviceVector devices3;
         if (strategy == STRATEGY_MEDIA) {
             // ARC, SPDIF and AUX_LINE can co-exist with others.
diff --git a/services/audiopolicy/enginedefault/src/Engine.h b/services/audiopolicy/enginedefault/src/Engine.h
index 878bca9..a6090cf 100644
--- a/services/audiopolicy/enginedefault/src/Engine.h
+++ b/services/audiopolicy/enginedefault/src/Engine.h
@@ -109,6 +109,9 @@
     DeviceVector getDisabledDevicesForInputSource(
             const DeviceVector& availableInputDevices, audio_source_t inputSource) const;
 
+    bool isBtScoActive(DeviceVector& availableOutputDevices,
+                       const SwAudioOutputCollection &outputs) const;
+
     std::map<product_strategy_t, legacy_strategy> mLegacyStrategyMap;
 };
 } // namespace audio_policy
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 6a94c81..e5fe2d2 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -66,6 +66,7 @@
 using android::media::audio::common::AudioDeviceAddress;
 using android::media::audio::common::AudioPortDeviceExt;
 using android::media::audio::common::AudioPortExt;
+using com::android::media::audioserver::fix_call_audio_patch;
 using content::AttributionSourceState;
 
 //FIXME: workaround for truncated touch sounds
@@ -374,6 +375,7 @@
         checkLeBroadcastRoutes(wasLeUnicastActive, nullptr, 0);
 
         mpClientInterface->onAudioPortListUpdate();
+        ALOGV("%s() completed for device: %s", __func__, device->toString().c_str());
         return NO_ERROR;
     }  // end if is output device
 
@@ -389,6 +391,8 @@
                 return INVALID_OPERATION;
             }
 
+            ALOGV("%s() connecting device %s", __func__, device->toString().c_str());
+
             if (mAvailableInputDevices.add(device) < 0) {
                 return NO_MEMORY;
             }
@@ -461,6 +465,7 @@
         }
 
         mpClientInterface->onAudioPortListUpdate();
+        ALOGV("%s() completed for device: %s", __func__, device->toString().c_str());
         return NO_ERROR;
     } // end if is input device
 
@@ -704,8 +709,10 @@
     audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
     auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
 
-    disconnectTelephonyAudioSource(mCallRxSourceClient);
-    disconnectTelephonyAudioSource(mCallTxSourceClient);
+    if (!fix_call_audio_patch()) {
+        disconnectTelephonyAudioSource(mCallRxSourceClient);
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
+    }
 
     if (rxDevices.isEmpty()) {
         ALOGW("%s() no selected output device", __func__);
@@ -758,13 +765,16 @@
     // Use legacy routing method for voice calls via setOutputDevice() on primary output.
     // Otherwise, create two audio patches for TX and RX path.
     if (!createRxPatch) {
+        if (fix_call_audio_patch()) {
+            disconnectTelephonyAudioSource(mCallRxSourceClient);
+        }
         if (!hasPrimaryOutput()) {
             ALOGW("%s() no primary output available", __func__);
             return INVALID_OPERATION;
         }
         muteWaitMs = setOutputDevices(__func__, mPrimaryOutput, rxDevices, true, delayMs);
     } else { // create RX path audio patch
-        connectTelephonyRxAudioSource();
+        connectTelephonyRxAudioSource(delayMs);
         // If the TX device is on the primary HW module but RX device is
         // on other HW module, SinkMetaData of telephony input should handle it
         // assuming the device uses audio HAL V5.0 and above
@@ -780,6 +790,8 @@
             }
         }
         connectTelephonyTxAudioSource(txSourceDevice, txSinkDevice, delayMs);
+    } else if (fix_call_audio_patch()) {
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
     }
     if (waitMs != nullptr) {
         *waitMs = muteWaitMs;
@@ -799,19 +811,40 @@
     return false;
 }
 
-void AudioPolicyManager::connectTelephonyRxAudioSource()
+void AudioPolicyManager::connectTelephonyRxAudioSource(uint32_t delayMs)
 {
-    disconnectTelephonyAudioSource(mCallRxSourceClient);
+    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
+    if (fix_call_audio_patch()) {
+        if (mCallRxSourceClient != nullptr) {
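+            // Reuse the existing call RX source if it is already connected to the currently
+            // selected RX device; otherwise tear it down and create a new one below.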
+            DeviceVector rxDevices =
+                  mEngine->getOutputDevicesForAttributes(aa, nullptr, false /*fromCache*/);
+            ALOG_ASSERT(!rxDevices.isEmpty() || !mCallRxSourceClient->isConnected(),
+                        "connectTelephonyRxAudioSource(): no device found for call RX source");
+            sp<DeviceDescriptor> rxDevice = rxDevices.itemAt(0);
+            if (mCallRxSourceClient->isConnected()
+                    && mCallRxSourceClient->sinkDevice()->equals(rxDevice)) {
+                return;
+            }
+            disconnectTelephonyAudioSource(mCallRxSourceClient);
+        }
+    } else {
+        disconnectTelephonyAudioSource(mCallRxSourceClient);
+    }
+
     const struct audio_port_config source = {
         .role = AUDIO_PORT_ROLE_SOURCE, .type = AUDIO_PORT_TYPE_DEVICE,
         .ext.device.type = AUDIO_DEVICE_IN_TELEPHONY_RX, .ext.device.address = ""
     };
-    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
-
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
-    status_t status = startAudioSource(&source, &aa, &portId, 0 /*uid*/, true /*internal*/);
+
+    status_t status = startAudioSourceInternal(&source, &aa, &portId, 0 /*uid*/,
+                                       true /*internal*/, true /*isCallRx*/, delayMs);
     ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
     mCallRxSourceClient = mAudioSources.valueFor(portId);
+    ALOGV("%s portdID %d between source %s and sink %s", __func__, portId,
+        mCallRxSourceClient->srcDevice()->toString().c_str(),
+        mCallRxSourceClient->sinkDevice()->toString().c_str());
     ALOGE_IF(mCallRxSourceClient == nullptr,
              "%s failed to start Telephony Rx AudioSource", __func__);
 }
@@ -830,15 +863,26 @@
         const sp<DeviceDescriptor> &srcDevice, const sp<DeviceDescriptor> &sinkDevice,
         uint32_t delayMs)
 {
-    disconnectTelephonyAudioSource(mCallTxSourceClient);
     if (srcDevice == nullptr || sinkDevice == nullptr) {
         ALOGW("%s could not create patch, invalid sink and/or source device(s)", __func__);
         return;
     }
+
+    if (fix_call_audio_patch()) {
+        if (mCallTxSourceClient != nullptr) {
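+            // Reuse the existing call TX source if it is already connected from the same source
+            // device; otherwise tear it down and create a new one below.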
+            if (mCallTxSourceClient->isConnected()
+                    && mCallTxSourceClient->srcDevice()->equals(srcDevice)) {
+                return;
+            }
+            disconnectTelephonyAudioSource(mCallTxSourceClient);
+        }
+    } else {
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
+    }
+
     PatchBuilder patchBuilder;
     patchBuilder.addSource(srcDevice).addSink(sinkDevice);
-    ALOGV("%s between source %s and sink %s", __func__,
-            srcDevice->toString().c_str(), sinkDevice->toString().c_str());
+
     auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
     const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
 
@@ -846,7 +890,8 @@
     srcDevice->toAudioPortConfig(&source);
     mCallTxSourceClient = new SourceClientDescriptor(
                 callTxSourceClientPortId, mUidCached, aa, source, srcDevice, AUDIO_STREAM_PATCH,
-                mCommunnicationStrategy, toVolumeSource(aa), true);
+                mCommunnicationStrategy, toVolumeSource(aa), true,
+                false /*isCallRx*/, true /*isCallTx*/);
     mCallTxSourceClient->setPreferredDeviceId(sinkDevice->getId());
 
     audio_patch_handle_t patchHandle = AUDIO_PATCH_HANDLE_NONE;
@@ -854,6 +899,8 @@
                 mCallTxSourceClient, sinkDevice, patchBuilder.patch(), patchHandle, mUidCached,
                 delayMs);
     ALOGE_IF(status != NO_ERROR, "%s() error %d creating TX audio patch", __func__, status);
+    ALOGV("%s portdID %d between source %s and sink %s", __func__, callTxSourceClientPortId,
+        srcDevice->toString().c_str(), sinkDevice->toString().c_str());
     if (status == NO_ERROR) {
         mAudioSources.add(callTxSourceClientPortId, mCallTxSourceClient);
     }
@@ -1549,7 +1596,7 @@
                 (config->channel_mask == desc->getChannelMask()) &&
                 (session == desc->mDirectClientSession)) {
                 desc->mDirectOpenCount++;
-                ALOGV("%s reusing direct output %d for session %d", __func__,
+                ALOGI("%s reusing direct output %d for session %d", __func__,
                     mOutputs.keyAt(i), session);
                 *output = mOutputs.keyAt(i);
                 return NO_ERROR;
@@ -1559,17 +1606,23 @@
 
     if (!profile->canOpenNewIo()) {
         if (!com::android::media::audioserver::direct_track_reprioritization()) {
+            ALOGW("%s profile %s can't open new output maxOpenCount reached", __func__,
+                  profile->getName().c_str());
             return NAME_NOT_FOUND;
         } else if ((profile->getFlags() & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) != 0) {
             // MMAP gracefully handles lack of an exclusive track resource by mixing
             // above the audio framework. For AAudio to know that the limit is reached,
             // return an error.
+            ALOGW("%s profile %s can't open new mmap output maxOpenCount reached", __func__,
+                  profile->getName().c_str());
             return NAME_NOT_FOUND;
         } else {
             // Close outputs on this profile, if available, to free resources for this request
             for (int i = 0; i < mOutputs.size() && !profile->canOpenNewIo(); i++) {
                 const auto desc = mOutputs.valueAt(i);
                 if (desc->mProfile == profile) {
+                    ALOGV("%s closeOutput %d to prioritize session %d on profile %s", __func__,
+                          desc->mIoHandle, session, profile->getName().c_str());
                     closeOutput(desc->mIoHandle);
                 }
             }
@@ -1578,6 +1631,8 @@
 
     // Unable to close streams to find free resources for this request
     if (!profile->canOpenNewIo()) {
+        ALOGW("%s profile %s can't open new output maxOpenCount reached", __func__,
+              profile->getName().c_str());
         return NAME_NOT_FOUND;
     }
 
@@ -3082,43 +3137,115 @@
         }
     }
 
+    bool isPreemptor = false;
     if (!profile->canOpenNewIo()) {
-        for (size_t i = 0; i < mInputs.size(); ) {
-            sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
-            if (desc->mProfile != profile) {
-                i++;
-                continue;
-            }
-            // if sound trigger, reuse input if used by other sound trigger on same session
-            // else
-            //    reuse input if active client app is not in IDLE state
-            //
-            RecordClientVector clients = desc->clientsList();
-            bool doClose = false;
-            for (const auto& client : clients) {
-                if (isSoundTrigger != client->isSoundTrigger()) {
+        if (com::android::media::audioserver::fix_input_sharing_logic()) {
+            //  First pick best candidate for preemption (there may not be any):
+            //  - Preempt an input if:
+            //     - It has only strictly lower priority use cases than the new client, or
+            //     - It has use cases of equal priority to the new client and either was not
+            //       opened through preemption or has been active since it was opened.
+            //  - Order the preemption candidates by inactive first and priority second
+            sp<AudioInputDescriptor> closeCandidate;
+            int leastCloseRank = INT_MAX;
+            static const int sCloseActive = 0x100;
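+            // closeRank biases active inputs by sCloseActive so that inactive inputs are
+            // preempted first, with the highest client priority breaking ties.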
+
+            for (size_t i = 0; i < mInputs.size(); i++) {
+                sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                if (desc->mProfile != profile) {
                     continue;
                 }
-                if (client->isSoundTrigger()) {
-                    if (session == client->session()) {
+                sp<RecordClientDescriptor> topPrioClient = desc->getHighestPriorityClient();
+                if (topPrioClient == nullptr) {
+                    continue;
+                }
+                int topPrio = source_priority(topPrioClient->source());
+                if (topPrio < source_priority(attributes.source)
+                      || (topPrio == source_priority(attributes.source)
+                          && !desc->isPreemptor())) {
+                    int closeRank = (desc->isActive() ? sCloseActive : 0) + topPrio;
+                    if (closeRank < leastCloseRank) {
+                        leastCloseRank = closeRank;
+                        closeCandidate = desc;
+                    }
+                }
+            }
+
+            if (closeCandidate != nullptr) {
+                closeInput(closeCandidate->mIoHandle);
+                // Mark the new input as being issued from a preemption
+                // so that it will not be preempted later
+                isPreemptor = true;
+            } else {
+                // Then pick the best reusable input (There is always one)
+                // The order of preference is:
+                // 1) active inputs with same use case as the new client
+                // 2) inactive inputs with same use case
+                // 3) active inputs with different use cases
+                // 4) inactive inputs with different use cases
+                sp<AudioInputDescriptor> reuseCandidate;
+                int leastReuseRank = INT_MAX;
+                static const int sReuseDifferentUseCase = 0x100;
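+                // reuseRank biases inputs with a different use case by sReuseDifferentUseCase and
+                // adds 1 for inactive inputs, which yields the preference order listed above.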
+
+                for (size_t i = 0; i < mInputs.size(); i++) {
+                    sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                    if (desc->mProfile != profile) {
+                        continue;
+                    }
+                    int reuseRank = sReuseDifferentUseCase;
+                    for (const auto& client: desc->getClientIterable()) {
+                        if (client->source() == attributes.source) {
+                            reuseRank = 0;
+                            break;
+                        }
+                    }
+                    reuseRank += desc->isActive() ? 0 : 1;
+                    if (reuseRank < leastReuseRank) {
+                        leastReuseRank = reuseRank;
+                        reuseCandidate = desc;
+                    }
+                }
+                return reuseCandidate->mIoHandle;
+            }
+        } else { // fix_input_sharing_logic()
+            for (size_t i = 0; i < mInputs.size(); ) {
+                 sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                 if (desc->mProfile != profile) {
+                     i++;
+                     continue;
+                 }
+                // if sound trigger, reuse input if used by other sound trigger on same session
+                // else
+                //    reuse input if active client app is not in IDLE state
+                //
+                RecordClientVector clients = desc->clientsList();
+                bool doClose = false;
+                for (const auto& client : clients) {
+                    if (isSoundTrigger != client->isSoundTrigger()) {
+                        continue;
+                    }
+                    if (client->isSoundTrigger()) {
+                        if (session == client->session()) {
+                            return desc->mIoHandle;
+                        }
+                        continue;
+                    }
+                    if (client->active() && client->appState() != APP_STATE_IDLE) {
                         return desc->mIoHandle;
                     }
-                    continue;
+                    doClose = true;
                 }
-                if (client->active() && client->appState() != APP_STATE_IDLE) {
-                    return desc->mIoHandle;
+                if (doClose) {
+                    closeInput(desc->mIoHandle);
+                } else {
+                    i++;
                 }
-                doClose = true;
-            }
-            if (doClose) {
-                closeInput(desc->mIoHandle);
-            } else {
-                i++;
             }
         }
     }
 
-    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(profile, mpClientInterface);
+    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(
+            profile, mpClientInterface, isPreemptor);
 
     audio_config_t lConfig = AUDIO_CONFIG_INITIALIZER;
     lConfig.sample_rate = profileSamplingRate;
@@ -3424,8 +3551,8 @@
         ALOGW("%s: no group for stream %s, bailing out", __func__, toString(stream).c_str());
         return NO_ERROR;
     }
-    ALOGV("%s: stream %s attributes=%s", __func__,
-          toString(stream).c_str(), toString(attributes).c_str());
+    ALOGV("%s: stream %s attributes=%s, index %d , device 0x%X", __func__,
+          toString(stream).c_str(), toString(attributes).c_str(), index, device);
     return setVolumeIndexForAttributes(attributes, index, device);
 }
 
@@ -3584,7 +3711,9 @@
         if (isVolumeConsistentForCalls(vs, {mCallRxSourceClient->sinkDevice()->type()},
                 isVoiceVolSrc, isBtScoVolSrc, __func__)
                 && (isVoiceVolSrc || isBtScoVolSrc)) {
-            setVoiceVolume(index, curves, isVoiceVolSrc, 0);
+            bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+                    !audio_is_ble_out_device(mCallRxSourceClient->sinkDevice()->type());
+            setVoiceVolume(index, curves, voiceVolumeManagedByHost, 0);
         }
     }
 
@@ -3601,8 +3730,8 @@
     bool hasVoice = hasVoiceStream(volumeCurves.getStreamTypes());
     if (((index < volumeCurves.getVolumeIndexMin()) && !(hasVoice && index == 0)) ||
             (index > volumeCurves.getVolumeIndexMax())) {
-        ALOGD("%s: wrong index %d min=%d max=%d", __FUNCTION__, index,
-              volumeCurves.getVolumeIndexMin(), volumeCurves.getVolumeIndexMax());
+        ALOGE("%s: wrong index %d min=%d max=%d, device 0x%X", __FUNCTION__, index,
+              volumeCurves.getVolumeIndexMin(), volumeCurves.getVolumeIndexMax(), device);
         return BAD_VALUE;
     }
     if (!audio_is_output_device(device)) {
@@ -5095,7 +5224,7 @@
             new SourceClientDescriptor(
                 portId, uid, attributes, *source, srcDevice, AUDIO_STREAM_PATCH,
                 mEngine->getProductStrategyForAttributes(attributes), toVolumeSource(attributes),
-                true);
+                true, false /*isCallRx*/, false /*isCallTx*/);
     sourceDesc->setPreferredDeviceId(sinkDevice->getId());
 
     status_t status =
@@ -5427,7 +5556,7 @@
                         outputDesc->toAudioPortConfig(&srcMixPortConfig, nullptr);
                         // for volume control, we may need a valid stream
                         srcMixPortConfig.ext.mix.usecase.stream =
-                            (!sourceDesc->isInternal() || isCallTxAudioSource(sourceDesc)) ?
+                            (!sourceDesc->isInternal() || sourceDesc->isCallTx()) ?
                                     mEngine->getStreamTypeForAttributes(sourceDesc->attributes()) :
                                     AUDIO_STREAM_PATCH;
                         patchBuilder.addSource(srcMixPortConfig);
@@ -5765,7 +5894,16 @@
 status_t AudioPolicyManager::startAudioSource(const struct audio_port_config *source,
                                               const audio_attributes_t *attributes,
                                               audio_port_handle_t *portId,
-                                              uid_t uid, bool internal)
+                                              uid_t uid) {
+    return startAudioSourceInternal(source, attributes, portId, uid,
+                                    false /*internal*/, false /*isCallRx*/, 0 /*delayMs*/);
+}
+
+status_t AudioPolicyManager::startAudioSourceInternal(const struct audio_port_config *source,
+                                              const audio_attributes_t *attributes,
+                                              audio_port_handle_t *portId,
+                                              uid_t uid, bool internal, bool isCallRx,
+                                              uint32_t delayMs)
 {
     ALOGV("%s", __FUNCTION__);
     *portId = AUDIO_PORT_HANDLE_NONE;
@@ -5798,16 +5936,17 @@
         new SourceClientDescriptor(*portId, uid, *attributes, *source, srcDevice,
                                    mEngine->getStreamTypeForAttributes(*attributes),
                                    mEngine->getProductStrategyForAttributes(*attributes),
-                                   toVolumeSource(*attributes), internal);
+                                   toVolumeSource(*attributes), internal, isCallRx,
+                                   false /*isCallTx*/);
 
-    status_t status = connectAudioSource(sourceDesc);
+    status_t status = connectAudioSource(sourceDesc, delayMs);
     if (status == NO_ERROR) {
         mAudioSources.add(*portId, sourceDesc);
     }
     return status;
 }
 
-status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc)
+status_t AudioPolicyManager::connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+                                                uint32_t delayMs)
 {
     ALOGV("%s handle %d", __FUNCTION__, sourceDesc->portId());
 
@@ -5833,7 +5972,7 @@
     audio_patch_handle_t handle = AUDIO_PATCH_HANDLE_NONE;
 
     return connectAudioSourceToSink(
-                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, 0 /*delayMs*/);
+                sourceDesc, sinkDevice, patchBuilder.patch(), handle, mUidCached, delayMs);
 }
 
 status_t AudioPolicyManager::stopAudioSource(audio_port_handle_t portId)
@@ -5891,7 +6030,8 @@
 float AudioPolicyManager::getStreamVolumeDB(
         audio_stream_type_t stream, int index, audio_devices_t device)
 {
-    return computeVolume(getVolumeCurves(stream), toVolumeSource(stream), index, {device});
+    return computeVolume(getVolumeCurves(stream), toVolumeSource(stream), index,
+                         {device}, /* adjustAttenuation= */false);
 }
 
 status_t AudioPolicyManager::getSurroundFormats(unsigned int *numSurroundFormats,
@@ -6562,6 +6702,14 @@
             if (!mConfig->getOutputDevices().contains(supportedDevice)) {
                 continue;
             }
+
+            if (outProfile->isMmap() && !outProfile->hasDynamicAudioProfile()
+                && availProfileDevices.areAllDevicesAttached()) {
+                ALOGV("%s skip opening output for mmap profile %s", __func__,
+                        outProfile->getTagName().c_str());
+                continue;
+            }
+
             sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
                                                                                  mpClientInterface);
             audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
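The skip condition added for MMAP profiles (here and in the matching input loop below) amounts to the predicate sketched here, assuming a profile view with these three properties; the real IOProfile/DeviceVector API may differ. An MMAP profile with fully static capabilities whose supported devices are all attached does not need to be opened during configuration and can be opened on demand.

    // Illustrative view of the three properties consulted above.
    struct ProfileView {
        bool isMmap;                  // profile carries the MMAP_NOIRQ flag
        bool hasDynamicAudioProfile;  // formats/rates must be probed by opening the stream
        bool allDevicesAttached;      // every supported device is already connected
    };

    // True when the hunks above skip opening the stream at configuration time.
    bool skipOpenAtConfig(const ProfileView& p) {
        return p.isMmap && !p.hasDynamicAudioProfile && p.allDevicesAttached;
    }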
@@ -6621,19 +6769,27 @@
                     __func__, inProfile->getTagName().c_str());
                 continue;
             }
-            sp<AudioInputDescriptor> inputDesc =
-                    new AudioInputDescriptor(inProfile, mpClientInterface);
+
+            if (inProfile->isMmap() && !inProfile->hasDynamicAudioProfile()
+                && availProfileDevices.areAllDevicesAttached()) {
+                ALOGV("%s skip opening input for mmap profile %s", __func__,
+                        inProfile->getTagName().c_str());
+                continue;
+            }
+
+            sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(
+                    inProfile, mpClientInterface, false /*isPreemptor*/);
 
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
             status_t status = inputDesc->open(nullptr,
                                               availProfileDevices.itemAt(0),
                                               AUDIO_SOURCE_MIC,
-                                              AUDIO_INPUT_FLAG_NONE,
+                                              (audio_input_flags_t) inProfile->getFlags(),
                                               &input);
             if (status != NO_ERROR) {
-                ALOGW("Cannot open input stream for device %s on hw module %s",
-                      availProfileDevices.toString().c_str(),
-                      hwModule->getName());
+                ALOGW("%s: Cannot open input stream for device %s for profile %s on hw module %s",
+                        __func__, availProfileDevices.toString().c_str(),
+                        inProfile->getTagName().c_str(), hwModule->getName());
                 continue;
             }
             for (const auto &device : availProfileDevices) {
@@ -6741,8 +6897,8 @@
                 sp<IOProfile> profile = hwModule->getOutputProfiles()[j];
                 if (profile->supportsDevice(device)) {
                     profiles.add(profile);
-                    ALOGV("checkOutputsForDevice(): adding profile %zu from module %s",
-                          j, hwModule->getName());
+                    ALOGV("%s(): adding profile %s from module %s",
+                            __func__, profile->getTagName().c_str(), hwModule->getName());
                 }
             }
         }
@@ -6775,7 +6931,11 @@
             if (j != outputs.size()) {
                 continue;
             }
-
+            if (profile->isMmap() && !profile->hasDynamicAudioProfile()) {
+                ALOGV("%s skip opening output for mmap profile %s",
+                      __func__, profile->getTagName().c_str());
+                continue;
+            }
             if (!profile->canOpenNewIo()) {
                 ALOGW("Max Output number %u already opened for this profile %s",
                       profile->maxOpenCount, profile->getTagName().c_str());
@@ -6836,9 +6996,8 @@
                 if (!profile->supportsDevice(device)) {
                     continue;
                 }
-                ALOGV("checkOutputsForDevice(): "
-                        "clearing direct output profile %zu on module %s",
-                        j, hwModule->getName());
+                ALOGV("%s(): clearing direct output profile %s on module %s",
+                        __func__, profile->getTagName().c_str(), hwModule->getName());
                 profile->clearAudioProfiles();
                 if (!profile->hasDynamicAudioProfile()) {
                     continue;
@@ -6893,8 +7052,8 @@
 
                 if (profile->supportsDevice(device)) {
                     profiles.add(profile);
-                    ALOGV("checkInputsForDevice(): adding profile %zu from module %s",
-                          profile_index, hwModule->getName());
+                    ALOGV("%s: adding profile %s from module %s", __func__,
+                          profile->getTagName().c_str(), hwModule->getName());
                 }
             }
         }
@@ -6926,15 +7085,22 @@
                 continue;
             }
 
+            if (profile->isMmap() && !profile->hasDynamicAudioProfile()) {
+                ALOGV("%s skip opening input for mmap profile %s",
+                      __func__, profile->getTagName().c_str());
+                continue;
+            }
             if (!profile->canOpenNewIo()) {
-                ALOGW("Max Input number %u already opened for this profile %s",
-                      profile->maxOpenCount, profile->getTagName().c_str());
+                ALOGW("%s Max Input number %u already opened for this profile %s",
+                      __func__, profile->maxOpenCount, profile->getTagName().c_str());
                 continue;
             }
 
-            desc = new AudioInputDescriptor(profile, mpClientInterface);
+            desc = new AudioInputDescriptor(profile, mpClientInterface, false /*isPreemptor*/);
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
-            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC, AUDIO_INPUT_FLAG_NONE, &input);
+            ALOGV("%s opening input for profile %s", __func__, profile->getTagName().c_str());
+            status = desc->open(nullptr, device, AUDIO_SOURCE_MIC,
+                                (audio_input_flags_t) profile->getFlags(), &input);
 
             if (status == NO_ERROR) {
                 const String8& address = String8(device->address().c_str());
@@ -6945,7 +7111,8 @@
                 }
                 updateAudioProfiles(device, input, profile);
                 if (!profile->hasValidAudioProfile()) {
-                    ALOGW("checkInputsForDevice() direct input missing param");
+                    ALOGW("%s direct input missing param for profile %s", __func__,
+                            profile->getTagName().c_str());
                     desc->close();
                     input = AUDIO_IO_HANDLE_NONE;
                 }
@@ -6956,18 +7123,20 @@
             } // endif input != 0
 
             if (input == AUDIO_IO_HANDLE_NONE) {
-                ALOGW("%s could not open input for device %s", __func__,
-                       device->toString().c_str());
+                ALOGW("%s could not open input for device %s on profile %s", __func__,
+                       device->toString().c_str(), profile->getTagName().c_str());
                 profiles.removeAt(profile_index);
                 profile_index--;
             } else {
                 if (audio_device_is_digital(device->type())) {
                     device->importAudioPortAndPickAudioProfile(profile);
                 }
-                ALOGV("checkInputsForDevice(): adding input %d", input);
+                ALOGV("%s: adding input %d for profile %s", __func__,
+                        input, profile->getTagName().c_str());
 
                 if (checkCloseInput(desc)) {
-                    ALOGV("%s closing input %d", __func__, input);
+                    ALOGV("%s: closing input %d for profile %s", __func__,
+                            input, profile->getTagName().c_str());
                     closeInput(input);
                 }
             }
@@ -6986,8 +7155,8 @@
                  profile_index++) {
                 sp<IOProfile> profile = hwModule->getInputProfiles()[profile_index];
                 if (profile->supportsDevice(device)) {
-                    ALOGV("checkInputsForDevice(): clearing direct input profile %zu on module %s",
-                            profile_index, hwModule->getName());
+                    ALOGV("%s: clearing direct input profile %s on module %s", __func__,
+                            profile->getTagName().c_str(), hwModule->getName());
                     profile->clearAudioProfiles();
                 }
             }
@@ -7178,8 +7347,8 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc != nullptr && followsSameRouting(attr, sourceDesc->attributes())
                 && sourceDesc->getPatchHandle() == AUDIO_PATCH_HANDLE_NONE
-                && !isCallRxAudioSource(sourceDesc) && !sourceDesc->isInternal()) {
-            connectAudioSource(sourceDesc);
+                && !sourceDesc->isCallRx() && !sourceDesc->isInternal()) {
+            connectAudioSource(sourceDesc, 0 /*delayMs*/);
         }
     }
 }
@@ -7285,8 +7454,8 @@
                 }
             }
             sp<SourceClientDescriptor> source = getSourceForAttributesOnOutput(srcOut, attr);
-            if (source != nullptr && !isCallRxAudioSource(source) && !source->isInternal()) {
-                connectAudioSource(source);
+            if (source != nullptr && !source->isCallRx() && !source->isInternal()) {
+                connectAudioSource(source, 0 /*delayMs*/);
             }
         }
 
@@ -8045,7 +8214,7 @@
             VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
             if (vsToDriveAbs == volumeSource) {
                 // attenuation is applied by the abs volume controller
-                return volumeDbMax;
+                return (index != 0) ? volumeDbMax : volumeDb;
             } else {
                 IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
                 int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
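In isolation, the index-0 special case above is simply: report full scale while an absolute-volume controller applies the attenuation, but let a zero index fall through to the computed value so mute still takes effect.

    // index != 0: the abs-volume controller attenuates, so report full scale.
    // index == 0: return the computed value so the stream is actually muted.
    float absVolumeResult(int index, float volumeDb, float volumeDbMax) {
        return (index != 0) ? volumeDbMax : volumeDb;
    }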
@@ -8069,9 +8238,15 @@
                                         VolumeSource volumeSource,
                                         int index,
                                         const DeviceTypeSet& deviceTypes,
+                                        bool adjustAttenuation,
                                         bool computeInternalInteraction)
 {
-    float volumeDb = adjustDeviceAttenuationForAbsVolume(curves, volumeSource, index, deviceTypes);
+    float volumeDb;
+    if (adjustAttenuation) {
+        volumeDb = adjustDeviceAttenuationForAbsVolume(curves, volumeSource, index, deviceTypes);
+    } else {
+        volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
+    }
     ALOGV("%s volume source %d, index %d,  devices %s, compute internal %b ", __func__,
           volumeSource, index, dumpDeviceTypes(deviceTypes).c_str(), computeInternalInteraction);
 
@@ -8092,7 +8267,8 @@
             mOutputs.isActive(ringVolumeSrc, 0)) {
         auto &ringCurves = getVolumeCurves(AUDIO_STREAM_RING);
         const float ringVolumeDb = computeVolume(ringCurves, ringVolumeSrc, index, deviceTypes,
-                /* computeInternalInteraction= */ false);
+                                                 adjustAttenuation,
+                                                 /* computeInternalInteraction= */false);
         return ringVolumeDb - 4 > volumeDb ? ringVolumeDb - 4 : volumeDb;
     }
 
@@ -8110,7 +8286,7 @@
         int voiceVolumeIndex = voiceCurves.getVolumeIndex(deviceTypes);
         const float maxVoiceVolDb =
                 computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, deviceTypes,
-                              /* computeInternalInteraction= */ false)
+                        adjustAttenuation, /* computeInternalInteraction= */false)
                 + IN_CALL_EARPIECE_HEADROOM_DB;
         // FIXME: Workaround for call screening applications until a proper audio mode is defined
         // to support this scenario : Exempt the RING stream from the audio cap if the audio was
@@ -8163,7 +8339,8 @@
                                              musicVolumeSrc,
                                              musicCurves.getVolumeIndex(musicDevice),
                                              musicDevice,
-                                             /* computeInternalInteraction= */ false);
+                                              adjustAttenuation,
+                                              /* computeInternalInteraction= */ false);
             float minVolDb = (musicVolDb > SONIFICATION_HEADSET_VOLUME_MIN_DB) ?
                         musicVolDb : SONIFICATION_HEADSET_VOLUME_MIN_DB;
             if (volumeDb > minVolDb) {
@@ -8172,9 +8349,10 @@
             }
             if (Volume::getDeviceForVolume(deviceTypes) != AUDIO_DEVICE_OUT_SPEAKER
                     &&  !Intersection(deviceTypes, {AUDIO_DEVICE_OUT_BLUETOOTH_A2DP,
-                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES}).empty()) {
-                // on A2DP, also ensure notification volume is not too low compared to media when
-                // intended to be played
+                        AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES,
+                        AUDIO_DEVICE_OUT_BLE_HEADSET}).empty()) {
+                // on A2DP/BLE, also ensure notification volume is not too low compared to media
+                // when intended to be played.
                 if ((volumeDb > -96.0f) &&
                         (musicVolDb - SONIFICATION_A2DP_MAX_MEDIA_DIFF_DB > volumeDb)) {
                     ALOGV("%s increasing volume for volume source=%d device=%s from %f to %f",
@@ -8256,9 +8434,10 @@
 
     float volumeDb = computeVolume(curves, volumeSource, index, deviceTypes);
     if (outputDesc->isFixedVolume(deviceTypes) ||
-            // Force VoIP volume to max for bluetooth SCO device except if muted
+            // Force VoIP volume to max for Bluetooth SCO/BLE devices except if muted
             (index != 0 && (isVoiceVolSrc || isBtScoVolSrc) &&
-                    isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device))) {
+                    (isSingleDeviceType(deviceTypes, audio_is_bluetooth_out_sco_device)
+                    || isSingleDeviceType(deviceTypes, audio_is_ble_out_device)))) {
         volumeDb = 0.0f;
     }
     const bool muted = (index == 0) && (volumeDb != 0.0f);
@@ -8266,17 +8445,19 @@
             deviceTypes, delayMs, force, isVoiceVolSrc);
 
     if (outputDesc == mPrimaryOutput && (isVoiceVolSrc || isBtScoVolSrc)) {
-        setVoiceVolume(index, curves, isVoiceVolSrc, delayMs);
+        bool voiceVolumeManagedByHost = isVoiceVolSrc &&
+                !isSingleDeviceType(deviceTypes, audio_is_ble_out_device);
+        setVoiceVolume(index, curves, voiceVolumeManagedByHost, delayMs);
     }
     return NO_ERROR;
 }
 
 void AudioPolicyManager::setVoiceVolume(
-        int index, IVolumeCurves &curves, bool isVoiceVolSrc, int delayMs) {
+        int index, IVolumeCurves &curves, bool voiceVolumeManagedByHost, int delayMs) {
     float voiceVolume;
-    // Force voice volume to max or mute for Bluetooth SCO as other attenuations are managed
+    // Force voice volume to max or mute for Bluetooth SCO/BLE as other attenuations are managed
     // by the headset
-    if (isVoiceVolSrc) {
+    if (voiceVolumeManagedByHost) {
         voiceVolume = (float)index/(float)curves.getVolumeIndexMax();
     } else {
         voiceVolume = index == 0 ? 0.0 : 1.0;
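The renamed flag makes the split explicit: the host scales the volume index only when it manages voice volume itself; for Bluetooth SCO, and now BLE outputs, the headset applies its own attenuation, so the host only signals full scale or mute. A standalone restatement:

    float voiceVolumeFor(int index, int indexMax, bool voiceVolumeManagedByHost) {
        if (voiceVolumeManagedByHost) {
            // Host-controlled sink: scale the volume index into [0.0, 1.0].
            return static_cast<float>(index) / static_cast<float>(indexMax);
        }
        // SCO/BLE headsets apply their own attenuation: full scale, or mute.
        return index == 0 ? 0.0f : 1.0f;
    }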
@@ -8454,7 +8635,7 @@
         sp<SourceClientDescriptor> sourceDesc = mAudioSources.valueAt(i);
         if (sourceDesc->isConnected() && (sourceDesc->srcDevice()->equals(deviceDesc) ||
                                           sourceDesc->sinkDevice()->equals(deviceDesc))
-                && !isCallRxAudioSource(sourceDesc)) {
+                && !sourceDesc->isCallRx()) {
             disconnectAudioSource(sourceDesc);
         }
     }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 011e867..98853ce 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -345,8 +345,7 @@
         virtual status_t startAudioSource(const struct audio_port_config *source,
                                           const audio_attributes_t *attributes,
                                           audio_port_handle_t *portId,
-                                          uid_t uid,
-                                          bool internal = false);
+                                          uid_t uid);
         virtual status_t stopAudioSource(audio_port_handle_t portId);
 
         virtual status_t setMasterMono(bool mono);
@@ -591,6 +590,8 @@
          * @param index index to match in the volume curves for the calculation
          * @param deviceTypes devices that should be considered in the volume curves for the
          *        calculation
+         * @param adjustAttenuation boolean indicating whether the value should be adjusted to
+         *        avoid double attenuation when controlling an AVRCP (absolute volume) device
          * @param computeInternalInteraction boolean indicating whether recursive volume computation
          *        should continue within the volume computation. Defaults to {@code true} so the
          *        volume interactions can be computed. Calls within the method should always set the
@@ -599,6 +600,7 @@
          */
         virtual float computeVolume(IVolumeCurves &curves, VolumeSource volumeSource,
                                int index, const DeviceTypeSet& deviceTypes,
+                               bool adjustAttenuation = true,
                                bool computeInternalInteraction = true);
 
         // rescale volume index from srcStream within range of dstStream
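A minimal sketch of how the new adjustAttenuation flag behaves; the curve helpers below are placeholders, not the real IVolumeCurves API:

    #include <algorithm>

    // Placeholder curve: linear mapping from index [0..100] to [-60 dB..0 dB].
    static float rawCurveDb(int index) {
        return -60.0f + 0.6f * static_cast<float>(std::clamp(index, 0, 100));
    }

    // Placeholder absolute-volume correction: an abs-volume sink applies its own
    // attenuation, so the host-side value collapses to 0 dB except when muted.
    static float absVolumeAdjustedDb(int index) {
        return index == 0 ? rawCurveDb(0) : 0.0f;
    }

    // adjustAttenuation = true (the default) keeps the abs-volume correction used
    // for internal volume application; getStreamVolumeDB() now passes false to
    // report the unmodified curve value.
    static float computeVolumeSketch(int index, bool adjustAttenuation = true) {
        return adjustAttenuation ? absVolumeAdjustedDb(index) : rawCurveDb(index);
    }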
@@ -705,15 +707,7 @@
         void updateCallAndOutputRouting(bool forceVolumeReeval = true, uint32_t delayMs = 0,
                 bool skipDelays = false);
 
-        bool isCallRxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallRxSourceClient != nullptr && source == mCallRxSourceClient;
-        }
-
-        bool isCallTxAudioSource(const sp<SourceClientDescriptor> &source) {
-            return mCallTxSourceClient != nullptr && source == mCallTxSourceClient;
-        }
-
-        void connectTelephonyRxAudioSource();
+        void connectTelephonyRxAudioSource(uint32_t delayMs);
 
         void disconnectTelephonyAudioSource(sp<SourceClientDescriptor> &clientDesc);
 
@@ -938,7 +932,8 @@
 
         status_t hasPrimaryOutput() const { return mPrimaryOutput != 0; }
 
-        status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
+        status_t connectAudioSource(const sp<SourceClientDescriptor>& sourceDesc,
+                                    uint32_t delayMs);
         status_t disconnectAudioSource(const sp<SourceClientDescriptor>& sourceDesc);
 
         status_t connectAudioSourceToSink(const sp<SourceClientDescriptor>& sourceDesc,
@@ -976,6 +971,13 @@
         void checkLeBroadcastRoutes(bool wasUnicastActive,
                 sp<SwAudioOutputDescriptor> ignoredOutput, uint32_t delayMs);
 
+        status_t startAudioSourceInternal(const struct audio_port_config *source,
+                                          const audio_attributes_t *attributes,
+                                          audio_port_handle_t *portId,
+                                          uid_t uid,
+                                          bool internal,
+                                          bool isCallRx,
+                                          uint32_t delayMs);
         const uid_t mUidCached;                         // AID_AUDIOSERVER
         sp<const AudioPolicyConfig> mConfig;
         EngineInstance mEngine;                         // Audio Policy Engine instance
diff --git a/services/audiopolicy/permission/Android.bp b/services/audiopolicy/permission/Android.bp
index d5f59a0..cfbeaae 100644
--- a/services/audiopolicy/permission/Android.bp
+++ b/services/audiopolicy/permission/Android.bp
@@ -34,8 +34,8 @@
     shared_libs: [
         "libbase",
         "libbinder",
-        "libutils",
         "liblog",
+        "libutils",
     ],
 
     host_supported: true,
@@ -43,21 +43,21 @@
         integer_overflow: true,
     },
     cflags: [
-        "-Wall",
-        "-Wdeprecated",
-        "-Wextra",
-        "-Werror=format",
-        "-Wextra-semi",
-        "-Wthread-safety",
-        "-Wconditional-uninitialized",
-        "-Wimplicit-fallthrough",
-        "-Wreorder-init-list",
-        "-Werror=reorder-init-list",
-        "-Wshadow-all",
-        "-Wunreachable-code-aggressive",
-        "-Werror",
         "-DANDROID_BASE_UNIQUE_FD_DISABLE_IMPLICIT_CONVERSION",
         "-DANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION",
+        "-Wall",
+        "-Wconditional-uninitialized",
+        "-Wdeprecated",
+        "-Werror",
+        "-Werror=format",
+        "-Werror=reorder-init-list",
+        "-Wextra",
+        "-Wextra-semi",
+        "-Wimplicit-fallthrough",
+        "-Wreorder-init-list",
+        "-Wshadow-all",
+        "-Wthread-safety",
+        "-Wunreachable-code-aggressive",
     ],
     tidy: true,
     tidy_checks: [
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 8ba2814..768cd07 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -1576,19 +1576,6 @@
     return Status::ok();
 }
 
-template <typename Port>
-void anonymizePortBluetoothAddress(Port *port) {
-    if (port->type != AUDIO_PORT_TYPE_DEVICE) {
-        return;
-    }
-    if (!(audio_is_a2dp_device(port->ext.device.type)
-            || audio_is_ble_device(port->ext.device.type)
-            || audio_is_bluetooth_sco_device(port->ext.device.type)
-            || audio_is_hearing_aid_out_device(port->ext.device.type))) {
-        return;
-    }
-    anonymizeBluetoothAddress(port->ext.device.address);
-}
 
 Status AudioPolicyService::listAudioPorts(media::AudioPortRole roleAidl,
                                           media::AudioPortType typeAidl, Int* count,
@@ -1607,27 +1594,14 @@
     std::unique_ptr<audio_port_v7[]> ports(new audio_port_v7[num_ports]);
     unsigned int generation;
 
-    const AttributionSourceState attributionSource = getCallingAttributionSource();
+    audio_utils::lock_guard _l(mMutex);
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
     AutoCallerClear acc;
-    {
-        audio_utils::lock_guard _l(mMutex);
-        if (mAudioPolicyManager == NULL) {
-            return binderStatusFromStatusT(NO_INIT);
-        }
-        // AudioPolicyManager->listAudioPorts makes a deep copy of port structs into ports
-        // so it is safe to access after releasing the mutex
-        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-                mAudioPolicyManager->listAudioPorts(
-                        role, type, &num_ports, ports.get(), &generation)));
-        numPortsReq = std::min(numPortsReq, num_ports);
-    }
-
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
-        for (size_t i = 0; i < numPortsReq; ++i) {
-            anonymizePortBluetoothAddress(&ports[i]);
-        }
-    }
-
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->listAudioPorts(role, type, &num_ports, ports.get(), &generation)));
+    numPortsReq = std::min(numPortsReq, num_ports);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(ports.get(), ports.get() + numPortsReq, std::back_inserter(*portsAidl),
                          legacy2aidl_audio_port_v7_AudioPortFw)));
@@ -1650,24 +1624,12 @@
 Status AudioPolicyService::getAudioPort(int portId,
                                         media::AudioPortFw* _aidl_return) {
     audio_port_v7 port{ .id = portId };
-
-    const AttributionSourceState attributionSource = getCallingAttributionSource();
+    audio_utils::lock_guard _l(mMutex);
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
     AutoCallerClear acc;
-
-    {
-        audio_utils::lock_guard _l(mMutex);
-        if (mAudioPolicyManager == NULL) {
-            return binderStatusFromStatusT(NO_INIT);
-        }
-        // AudioPolicyManager->getAudioPort makes a deep copy of the port struct into port
-        // so it is safe to access after releasing the mutex
-        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
-    }
-
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
-        anonymizePortBluetoothAddress(&port);
-    }
-
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(mAudioPolicyManager->getAudioPort(&port)));
     *_aidl_return = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_audio_port_v7_AudioPortFw(port));
     return Status::ok();
 }
@@ -1725,32 +1687,14 @@
     std::unique_ptr<audio_patch[]> patches(new audio_patch[num_patches]);
     unsigned int generation;
 
-    const AttributionSourceState attributionSource = getCallingAttributionSource();
+    audio_utils::lock_guard _l(mMutex);
+    if (mAudioPolicyManager == NULL) {
+        return binderStatusFromStatusT(NO_INIT);
+    }
     AutoCallerClear acc;
-
-    {
-        audio_utils::lock_guard _l(mMutex);
-        if (mAudioPolicyManager == NULL) {
-            return binderStatusFromStatusT(NO_INIT);
-        }
-        // AudioPolicyManager->listAudioPatches makes a deep copy of patches structs into patches
-        // so it is safe to access after releasing the mutex
-        RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
-                mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
-        numPatchesReq = std::min(numPatchesReq, num_patches);
-    }
-
-    if (mustAnonymizeBluetoothAddress(attributionSource, String16(__func__))) {
-        for (size_t i = 0; i < numPatchesReq; ++i) {
-            for (size_t j = 0; j < patches[i].num_sources; ++j) {
-                anonymizePortBluetoothAddress(&patches[i].sources[j]);
-            }
-            for (size_t j = 0; j < patches[i].num_sinks; ++j) {
-                anonymizePortBluetoothAddress(&patches[i].sinks[j]);
-            }
-        }
-    }
-
+    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
+            mAudioPolicyManager->listAudioPatches(&num_patches, patches.get(), &generation)));
+    numPatchesReq = std::min(numPatchesReq, num_patches);
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(
             convertRange(patches.get(), patches.get() + numPatchesReq,
                          std::back_inserter(*patchesAidl), legacy2aidl_audio_patch_AudioPatchFw)));
diff --git a/services/audiopolicy/tests/Android.bp b/services/audiopolicy/tests/Android.bp
index dc61115..4006489 100644
--- a/services/audiopolicy/tests/Android.bp
+++ b/services/audiopolicy/tests/Android.bp
@@ -12,6 +12,7 @@
     name: "audiopolicy_tests",
 
     defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
         "latest_android_media_audio_common_types_cpp_static",
     ],
 
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index ba0a1eb..d1dd1e9 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -72,6 +72,7 @@
         *input = mNextIoHandle++;
         mOpenedInputs.insert(*input);
         ALOGD("%s: opened input %d", __func__, *input);
+        mOpenInputCallsCount++;
         return NO_ERROR;
     }
 
@@ -86,6 +87,7 @@
             return BAD_VALUE;
         }
         ALOGD("%s: closed input %d", __func__, input);
+        mCloseInputCallsCount++;
         return NO_ERROR;
     }
 
@@ -260,6 +262,18 @@
         auto it = mTracksInternalMute.find(portId);
         return it == mTracksInternalMute.end() ? false : it->second;
     }
+    void resetInputApiCallsCounters() {
+        mOpenInputCallsCount = 0;
+        mCloseInputCallsCount = 0;
+    }
+
+    size_t getCloseInputCallsCount() const {
+        return mCloseInputCallsCount;
+    }
+
+    size_t getOpenInputCallsCount() const {
+        return mOpenInputCallsCount;
+    }
 
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
@@ -275,6 +289,8 @@
     std::set<audio_channel_mask_t> mSupportedChannelMasks;
     std::map<audio_port_handle_t, bool> mTracksInternalMute;
     std::set<audio_io_handle_t> mOpenedInputs;
+    size_t mOpenInputCallsCount = 0;
+    size_t mCloseInputCallsCount = 0;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 4c98687..07aad0c 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -3830,3 +3830,92 @@
         testing::Values(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
                         AUDIO_USAGE_ALARM)
 );
+
+class AudioPolicyManagerInputPreemptionTest : public AudioPolicyManagerTestWithConfigurationFile {
+};
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        SameSessionReusesInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(0, mClient->getCloseInputCallsCount());
+    EXPECT_EQ(input1, input2);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        LesserPriorityReusesInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_PORT_HANDLE_NONE;
+    attr.source = AUDIO_SOURCE_VOICE_RECOGNITION;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, OTHER_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(0, mClient->getCloseInputCallsCount());
+    EXPECT_EQ(input1, input2);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        HigherPriorityPreemptsInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_PORT_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_PORT_HANDLE_NONE;
+    attr.source = AUDIO_SOURCE_CAMCORDER;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, OTHER_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        48000));
+
+    EXPECT_EQ(2, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(1, mClient->getCloseInputCallsCount());
+    EXPECT_NE(input1, input2);
+}
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index bbc19fa..67e99f2 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -30,7 +30,7 @@
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                              samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                 </mixPort>
-                <mixPort name="primary input" role="sink">
+                <mixPort name="primary input" role="sink"  maxActiveCount="1" maxOpenCount="1">
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                              samplingRates="48000"
                              channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 7a2b434..157f084 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -513,7 +513,7 @@
 void CameraService::onDeviceStatusChanged(const std::string& cameraId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, newStatus=%d", __FUNCTION__,
-            cameraId.c_str(), newHalStatus);
+            cameraId.c_str(), eToI(newHalStatus));
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
@@ -537,7 +537,8 @@
     StatusInternal oldStatus = state->getStatus();
 
     if (oldStatus == newStatus) {
-        ALOGE("%s: State transition to the same status %#x not allowed", __FUNCTION__, newStatus);
+        ALOGE("%s: State transition to the same status %#x not allowed", __FUNCTION__,
+                eToI(newStatus));
         return;
     }
 
@@ -580,7 +581,7 @@
         const std::string& physicalId,
         CameraDeviceStatus newHalStatus) {
     ALOGI("%s: Status changed for cameraId=%s, physicalCameraId=%s, newStatus=%d",
-            __FUNCTION__, id.c_str(), physicalId.c_str(), newHalStatus);
+            __FUNCTION__, id.c_str(), physicalId.c_str(), eToI(newHalStatus));
 
     StatusInternal newStatus = mapToInternal(newHalStatus);
 
@@ -596,7 +597,7 @@
     if (logicalCameraStatus != StatusInternal::PRESENT &&
             logicalCameraStatus != StatusInternal::NOT_AVAILABLE) {
         ALOGE("%s: Physical camera id %s status %d change for an invalid logical camera state %d",
-                __FUNCTION__, physicalId.c_str(), newHalStatus, logicalCameraStatus);
+                __FUNCTION__, physicalId.c_str(), eToI(newHalStatus), eToI(logicalCameraStatus));
         return;
     }
 
@@ -688,7 +689,7 @@
 void CameraService::onTorchStatusChangedLocked(const std::string& cameraId,
         TorchModeStatus newStatus, SystemCameraKind systemCameraKind) {
     ALOGI("%s: Torch status changed for cameraId=%s, newStatus=%d",
-            __FUNCTION__, cameraId.c_str(), newStatus);
+            __FUNCTION__, cameraId.c_str(), eToI(newStatus));
 
     TorchModeStatus status;
     status_t res = getTorchStatusLocked(cameraId, &status);
@@ -783,12 +784,13 @@
     return true;
 }
 
-Status CameraService::getNumberOfCameras(int32_t type, int32_t deviceId, int32_t devicePolicy,
+Status CameraService::getNumberOfCameras(int32_t type,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         int32_t* numCameras) {
     ATRACE_CALL();
-    if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+    if (vd_flags::camera_device_awareness() && (clientAttribution.deviceId != kDefaultDeviceId)
             && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
-        *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(deviceId);
+        *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(clientAttribution.deviceId);
         return Status::ok();
     }
 
@@ -821,7 +823,7 @@
 }
 
 Status CameraService::createDefaultRequest(const std::string& unresolvedCameraId, int templateId,
-        int32_t deviceId, int32_t devicePolicy,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /* out */
         hardware::camera2::impl::CameraMetadataNative* request) {
     ATRACE_CALL();
@@ -836,11 +838,11 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
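The ICameraService entry points now receive caller identity as a single AttributionSourceState instead of separate deviceId / packageName / uid / pid arguments, and the virtual-device id used for camera id resolution is read from that parcelable. A minimal illustration, using an illustrative stand-in for the real AIDL type:

    #include <cstdint>
    #include <optional>
    #include <string>

    // Illustrative stand-in: only the fields read by the hunks above are shown.
    struct AttributionSourceView {
        int32_t deviceId = 0;                    // virtual device id of the caller
        std::optional<std::string> packageName;  // read via value_or("") above
        int32_t uid = -1;
        int32_t pid = -1;
    };

    // Typical pattern in the rewritten methods: all per-caller data comes from
    // the attribution object rather than from individual parameters.
    std::string packageOrEmpty(const AttributionSourceView& a) {
        return a.packageName.value_or("");
    }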
@@ -885,7 +887,7 @@
 Status CameraService::isSessionConfigurationWithParametersSupported(
         const std::string& unresolvedCameraId, int targetSdkVersion,
         const SessionConfiguration& sessionConfiguration,
-        int32_t deviceId, int32_t devicePolicy,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/ bool* supported) {
     ATRACE_CALL();
 
@@ -899,11 +901,11 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -926,8 +928,13 @@
                                 SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                                         mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
 
-    return isSessionConfigurationWithParametersSupportedUnsafe(cameraId, sessionConfiguration,
-                                                               overrideForPerfClass, supported);
+    auto ret = isSessionConfigurationWithParametersSupportedUnsafe(cameraId,
+            sessionConfiguration, overrideForPerfClass, supported);
+    if (flags::analytics_24q3()) {
+        mCameraServiceProxyWrapper->logFeatureCombinationQuery(cameraId,
+                getCallingUid(), sessionConfiguration, ret);
+    }
+    return ret;
 }
 
 Status CameraService::isSessionConfigurationWithParametersSupportedUnsafe(
@@ -977,7 +984,8 @@
 
 Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
         int targetSdkVersion, int rotationOverride,
-        const SessionConfiguration& sessionConfiguration, int32_t deviceId, int32_t devicePolicy,
+        const SessionConfiguration& sessionConfiguration,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/ CameraMetadata* outMetadata) {
     ATRACE_CALL();
 
@@ -994,11 +1002,11 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-                                                                  devicePolicy);
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                                       unresolvedCameraId.c_str(), deviceId);
+                                       unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1071,7 +1079,12 @@
             }
     }
 
-    return filterSensitiveMetadataIfNeeded(cameraId, outMetadata);
+    Status res = filterSensitiveMetadataIfNeeded(cameraId, outMetadata);
+    if (flags::analytics_24q3()) {
+        mCameraServiceProxyWrapper->logSessionCharacteristicsQuery(cameraId,
+                getCallingUid(), sessionConfiguration, res);
+    }
+    return res;
 }
 
 Status CameraService::filterSensitiveMetadataIfNeeded(
@@ -1195,14 +1208,16 @@
     return mVirtualDeviceCameraIdMapper.getActualCameraId(deviceId, inputCameraId);
 }
 
-Status CameraService::getCameraInfo(int cameraId,  int rotationOverride, int32_t deviceId,
-        int32_t devicePolicy, CameraInfo* cameraInfo) {
+Status CameraService::getCameraInfo(int cameraId,  int rotationOverride,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
-    std::string cameraIdStr = cameraIdIntToStrLocked(cameraId, deviceId, devicePolicy);
+    std::string cameraIdStr =
+            cameraIdIntToStrLocked(cameraId, clientAttribution.deviceId, devicePolicy);
     if (cameraIdStr.empty()) {
         std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
-                cameraId, deviceId);
+                cameraId, clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1276,8 +1291,8 @@
 }
 
 Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
-        int targetSdkVersion, int rotationOverride, int32_t deviceId, int32_t devicePolicy,
-        CameraMetadata* cameraInfo) {
+        int targetSdkVersion, int rotationOverride, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
 
     if (!cameraInfo) {
@@ -1292,11 +1307,11 @@
                 "Camera subsystem is not available");;
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1329,16 +1344,17 @@
     return filterSensitiveMetadataIfNeeded(cameraId, cameraInfo);
 }
 
-Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId, int32_t deviceId,
+Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId,
+        const AttributionSourceState& clientAttribution,
         int32_t devicePolicy, int32_t* torchStrength) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1542,7 +1558,7 @@
             serviceStatus = ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON;
             break;
         default:
-            ALOGW("Unknown new flash status: %d", status);
+            ALOGW("Unknown new flash status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -1560,7 +1576,7 @@
             serviceStatus = StatusInternal::ENUMERATING;
             break;
         default:
-            ALOGW("Unknown new HAL device status: %d", status);
+            ALOGW("Unknown new HAL device status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -1584,7 +1600,7 @@
             serviceStatus = ICameraServiceListener::STATUS_UNKNOWN;
             break;
         default:
-            ALOGW("Unknown new internal device status: %d", status);
+            ALOGW("Unknown new internal device status: %d", eToI(status));
     }
     return serviceStatus;
 }
@@ -2118,35 +2134,35 @@
 Status CameraService::connect(
         const sp<ICameraClient>& cameraClient,
         int api1CameraId,
-        const std::string& clientPackageName,
-        int clientUid,
-        int clientPid,
         int targetSdkVersion,
         int rotationOverride,
         bool forceSlowJpegMode,
-        int32_t deviceId,
+        const AttributionSourceState& clientAttribution,
         int32_t devicePolicy,
         /*out*/
         sp<ICamera>* device) {
     ATRACE_CALL();
     Status ret = Status::ok();
 
-    std::string cameraIdStr = cameraIdIntToStr(api1CameraId, deviceId, devicePolicy);
+    std::string cameraIdStr =
+            cameraIdIntToStr(api1CameraId, clientAttribution.deviceId, devicePolicy);
     if (cameraIdStr.empty()) {
         std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
-                api1CameraId, deviceId);
+                api1CameraId, clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
-            clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
+            clientAttribution.packageName.value_or(""), /*systemNativeClient*/ false, {},
+            clientAttribution.uid, clientAttribution.pid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
             rotationOverride, forceSlowJpegMode, cameraIdStr, /*out*/client);
 
     if (!ret.isOk()) {
-        logRejected(cameraIdStr, getCallingPid(), clientPackageName, toStdString(ret.toString8()));
+        logRejected(cameraIdStr, getCallingPid(), clientAttribution.packageName.value_or(""),
+                toStdString(ret.toString8()));
         return ret;
     }
 
@@ -2223,17 +2239,15 @@
 Status CameraService::connectDevice(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
         const std::string& unresolvedCameraId,
-        const std::string& clientPackageName,
-        const std::optional<std::string>& clientFeatureId,
-        int clientUid, int oomScoreOffset, int targetSdkVersion,
-        int rotationOverride, int32_t deviceId, int32_t devicePolicy,
+        int oomScoreOffset, int targetSdkVersion,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
     ATRACE_CALL();
     RunThreadWithRealtimePriority priorityBump;
     Status ret = Status::ok();
     sp<CameraDeviceClient> client = nullptr;
-    std::string clientPackageNameAdj = clientPackageName;
+    std::string clientPackageNameAdj = clientAttribution.packageName.value_or("");
     int callingPid = getCallingPid();
     int callingUid = getCallingUid();
     bool systemNativeClient = false;
@@ -2243,11 +2257,11 @@
         systemNativeClient = true;
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -2262,8 +2276,8 @@
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
-    userid_t clientUserId = multiuser_get_user_id(clientUid);
-    if (clientUid == USE_CALLING_UID) {
+    userid_t clientUserId = multiuser_get_user_id(clientAttribution.uid);
+    if (clientAttribution.uid == USE_CALLING_UID) {
         clientUserId = multiuser_get_user_id(callingUid);
     }
 
@@ -2289,10 +2303,10 @@
     }
 
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
-            cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
-            clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, rotationOverride, /*forceSlowJpegMode*/false, unresolvedCameraId,
-            /*out*/client);
+            cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient,
+            clientAttribution.attributionTag, clientAttribution.uid, USE_CALLING_PID, API_2,
+            /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
+            /*forceSlowJpegMode*/false, unresolvedCameraId, /*out*/client);
 
     if (!ret.isOk()) {
         logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
@@ -2815,8 +2829,8 @@
 }
 
 Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
-        int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
-        int32_t devicePolicy) {
+        int32_t torchStrength, const sp<IBinder>& clientBinder,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2827,11 +2841,11 @@
     }
 
     int uid = getCallingUid();
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -2953,7 +2967,8 @@
 }
 
 Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
-        const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy) {
+        const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2964,11 +2979,11 @@
     }
 
     int uid = getCallingUid();
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -3299,7 +3314,7 @@
 
 Status CameraService::isConcurrentSessionConfigurationSupported(
         const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
-        int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+        int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/bool* isSupported) {
     if (!isSupported) {
         ALOGE("%s: isSupported is NULL", __FUNCTION__);
@@ -3314,10 +3329,11 @@
 
     for (auto cameraIdAndSessionConfiguration : cameraIdsAndSessionConfigurations) {
         std::optional<std::string> cameraIdOptional =
-                resolveCameraId(cameraIdAndSessionConfiguration.mCameraId, deviceId, devicePolicy);
+                resolveCameraId(cameraIdAndSessionConfiguration.mCameraId,
+                        clientAttribution.deviceId, devicePolicy);
         if (!cameraIdOptional.has_value()) {
             std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                    cameraIdAndSessionConfiguration.mCameraId.c_str(), deviceId);
+                    cameraIdAndSessionConfiguration.mCameraId.c_str(), clientAttribution.deviceId);
             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
@@ -3330,7 +3346,7 @@
     bool hasCameraPermission = ((callingPid == getpid()) ||
             hasPermissionsForCamera(callingPid, callingUid,
                     devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT
-                        ? kDefaultDeviceId : deviceId));
+                        ? kDefaultDeviceId : clientAttribution.deviceId));
     if (!hasCameraPermission) {
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
                 "android.permission.CAMERA needed to call"
@@ -5767,7 +5783,7 @@
     }
 
     ALOGV("%s: Status has changed for camera ID %s from %#x to %#x", __FUNCTION__,
-            cameraId.c_str(), oldStatus, status);
+            cameraId.c_str(), eToI(oldStatus), eToI(status));
 
     if (oldStatus == StatusInternal::NOT_PRESENT &&
             (status != StatusInternal::PRESENT &&
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 9998fb8..6b21e05 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -154,13 +154,16 @@
     // resolveCameraId(logicalCameraId, deviceId, devicePolicy) to arrive at the correct
     // cameraId to perform the operation on (in case of contexts
     // associated with virtual devices).
-    virtual binder::Status     getNumberOfCameras(int32_t type, int32_t deviceId,
+    virtual binder::Status     getNumberOfCameras(int32_t type,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, int32_t* numCameras);
 
     virtual binder::Status     getCameraInfo(int cameraId, int rotationOverride,
-            int32_t deviceId, int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
     virtual binder::Status     getCameraCharacteristics(const std::string& cameraId,
-            int targetSdkVersion, int rotationOverride, int32_t deviceId,
+            int targetSdkVersion, int rotationOverride,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
@@ -170,17 +173,15 @@
             hardware::camera2::params::VendorTagDescriptorCache* cache);
 
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
-            int32_t cameraId, const std::string& clientPackageName,
-            int32_t clientUid, int clientPid, int targetSdkVersion,
-            int rotationOverride, bool forceSlowJpegMode, int32_t deviceId,
+            int32_t cameraId, int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, /*out*/ sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-            const std::string& cameraId,
-            const std::string& clientPackageName, const std::optional<std::string>& clientFeatureId,
-            int32_t clientUid, int scoreOffset, int targetSdkVersion, int rotationOverride,
-            int32_t deviceId, int32_t devicePolicy,
+            const std::string& cameraId, int scoreOffset, int targetSdkVersion,
+            int rotationOverride, const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -196,7 +197,7 @@
 
     virtual binder::Status isConcurrentSessionConfigurationSupported(
         const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>& sessions,
-        int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+        int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/bool* supported);
 
     virtual binder::Status    getLegacyParameters(
@@ -205,13 +206,16 @@
             std::string* parameters);
 
     virtual binder::Status    setTorchMode(const std::string& cameraId, bool enabled,
-            const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy);
-
-    virtual binder::Status    turnOnTorchWithStrengthLevel(const std::string& cameraId,
-            int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
+            const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
             int32_t devicePolicy);
 
-    virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId, int32_t deviceId,
+    virtual binder::Status    turnOnTorchWithStrengthLevel(const std::string& cameraId,
+            int32_t torchStrength, const sp<IBinder>& clientBinder,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy);
+
+    virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, int32_t* torchStrength);
 
     virtual binder::Status    notifySystemEvent(int32_t eventId,
@@ -247,19 +251,20 @@
             const hardware::camera2::impl::CameraMetadataNative& sessionParams);
 
     virtual binder::Status createDefaultRequest(const std::string& cameraId, int templateId,
-            int32_t deviceId, int32_t devicePolicy,
+            const AttributionSourceState& clientAttribution, int32_t devicePolicy,
             /*out*/
             hardware::camera2::impl::CameraMetadataNative* request);
 
     virtual binder::Status isSessionConfigurationWithParametersSupported(
             const std::string& cameraId, int targetSdkVersion,
             const SessionConfiguration& sessionConfiguration,
-            int32_t deviceId, int32_t devicePolicy,
+            const AttributionSourceState& clientAttribution, int32_t devicePolicy,
             /*out*/ bool* supported);
 
     virtual binder::Status getSessionCharacteristics(
             const std::string& cameraId, int targetSdkVersion, int rotationOverride,
-            const SessionConfiguration& sessionConfiguration, int32_t deviceId,
+            const SessionConfiguration& sessionConfiguration,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, /*out*/ CameraMetadata* outMetadata);
 
     // Extra permissions checks
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 2886942..7f674bd 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -28,6 +28,7 @@
 #include <binder/Status.h>
 #include <camera/CameraUtils.h>
 #include <hidl/HidlTransportSupport.h>
+#include <utils/AttributionAndPermissionUtils.h>
 #include <utils/Utils.h>
 
 namespace android::frameworks::cameraservice::service::implementation {
@@ -89,10 +90,15 @@
     if (_aidl_return == nullptr) { return fromSStatus(SStatus::ILLEGAL_ARGUMENT); }
 
     ::android::CameraMetadata cameraMetadata;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
     UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
                                                            mVndkVersion,
                                                            ROTATION_OVERRIDE_NONE,
-                                                           kDefaultDeviceId,
+                                                           clientAttribution,
                                                            /* devicePolicy= */ 0,
                                                            &cameraMetadata);
     if (!ret.isOk()) {
@@ -143,16 +149,20 @@
         return fromSStatus(SStatus::UNKNOWN_ERROR);
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
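+    // An empty package name and no attribution tag mirror the std::string() and
+    // empty feature-id arguments that this call previously passed explicitly.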
     binder::Status serviceRet = mCameraService->connectDevice(
             callbacks,
             in_cameraId,
-            std::string(),
-            /* clientFeatureId= */{},
-            hardware::ICameraService::USE_CALLING_UID,
             /* scoreOffset= */ 0,
             /* targetSdkVersion= */ __ANDROID_API_FUTURE__,
             ROTATION_OVERRIDE_NONE,
-            kDefaultDeviceId,
+            clientAttribution,
             /* devicePolicy= */ 0,
             &unstableDevice);
     if (!serviceRet.isOk()) {
diff --git a/services/camera/libcameraservice/aidl/AidlUtils.cpp b/services/camera/libcameraservice/aidl/AidlUtils.cpp
index d23566c..1ec5072 100644
--- a/services/camera/libcameraservice/aidl/AidlUtils.cpp
+++ b/services/camera/libcameraservice/aidl/AidlUtils.cpp
@@ -26,6 +26,7 @@
 #include <device3/Camera3StreamInterface.h>
 #include <gui/bufferqueue/1.0/H2BGraphicBufferProducer.h>
 #include <mediautils/AImageReaderUtils.h>
+#include "utils/Utils.h"
 
 namespace android::hardware::cameraservice::utils::conversion::aidl {
 
@@ -347,8 +348,8 @@
     // Ensure the vendor ID are the same before attempting
     // anything else. If vendor IDs differ we cannot safely copy the characteristics.
     if (from.getVendorId() != to->getVendorId()) {
-        ALOGE("%s: Incompatible CameraMetadata objects. Vendor IDs differ. From: %lu; To: %lu",
-              __FUNCTION__, from.getVendorId(), to->getVendorId());
+        ALOGE("%s: Incompatible CameraMetadata objects. Vendor IDs differ. From: %" PRIu64
+              "; To: %" PRIu64, __FUNCTION__, from.getVendorId(), to->getVendorId());
         return BAD_VALUE;
     }
 
@@ -364,7 +365,7 @@
     for (size_t i = 0; i < get_camera_metadata_entry_count(src); i++) {
         int ret = get_camera_metadata_ro_entry(src, i, &entry);
         if (ret != OK) {
-            ALOGE("%s: Could not fetch entry at index %lu. Error: %d", __FUNCTION__, i, ret);
+            ALOGE("%s: Could not fetch entry at index %zu. Error: %d", __FUNCTION__, i, ret);
             from.unlock(src);
             return BAD_VALUE;
         }
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index 225d7f5..ea4f6fd 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -40,6 +40,7 @@
 #include "common/CameraDeviceBase.h"
 #include "utils/ExifUtils.h"
 #include "utils/SessionConfigurationUtils.h"
+#include "utils/Utils.h"
 #include "HeicEncoderInfoManager.h"
 #include "HeicCompositeStream.h"
 
@@ -1464,7 +1465,7 @@
     const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(CameraBlob));
     const CameraBlob *blob = (const CameraBlob*)(header);
     if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
-        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, blob->blobId);
+        ALOGE("%s: Invalid EXIF blobId %d", __FUNCTION__, eToI(blob->blobId));
         return 0;
     }
 
@@ -1588,7 +1589,7 @@
         // The chrome plane could be either Cb first, or Cr first. Take the
         // smaller address.
         uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
-        MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
+        MediaImage2::PlaneIndex dstPlane = codecUPlaneFirst ? MediaImage2::U : MediaImage2::V;
         for (auto row = top/2; row < (top+height)/2; row++) {
             uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
                     imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 6416c11..2440c37 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -51,6 +51,7 @@
 
 #include "api2/HeicCompositeStream.h"
 #include "device3/ZoomRatioMapper.h"
+#include "utils/Utils.h"
 
 namespace android {
 
@@ -92,7 +93,7 @@
         case TorchModeStatus::AVAILABLE_ON:
             return "AVAILABLE_ON";
     }
-    ALOGW("Unexpected HAL torch mode status code %d", s);
+    ALOGW("Unexpected HAL torch mode status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
@@ -105,7 +106,7 @@
         case CameraDeviceStatus::ENUMERATING:
             return "ENUMERATING";
     }
-    ALOGW("Unexpected HAL device status code %d", s);
+    ALOGW("Unexpected HAL device status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
@@ -2279,7 +2280,7 @@
                         return tryToInitializeAidlProviderLocked(removedAidlProviderName,
                                 providerInfo);
                     default:
-                        ALOGE("%s Unsupported Transport %d", __FUNCTION__, providerTransport);
+                        ALOGE("%s Unsupported Transport %d", __FUNCTION__, eToI(providerTransport));
                 }
             }
         }
@@ -2366,7 +2367,7 @@
             }
             break;
         default:
-            ALOGE("%s Invalid transport %d", __FUNCTION__, transport);
+            ALOGE("%s Invalid transport %d", __FUNCTION__, eToI(transport));
             return BAD_VALUE;
     }
 
@@ -2712,7 +2713,7 @@
         }
         if (!known) {
             ALOGW("Camera provider %s says an unknown camera %s now has torch status %d. Curious.",
-                    mProviderName.c_str(), cameraDeviceName.c_str(), newStatus);
+                mProviderName.c_str(), cameraDeviceName.c_str(), eToI(newStatus));
             return;
         }
         // no lock needed since listener is set up only once during
diff --git a/services/camera/libcameraservice/common/HalConversionsTemplated.h b/services/camera/libcameraservice/common/HalConversionsTemplated.h
index 96a715c..c586062 100644
--- a/services/camera/libcameraservice/common/HalConversionsTemplated.h
+++ b/services/camera/libcameraservice/common/HalConversionsTemplated.h
@@ -19,6 +19,7 @@
 #include "common/CameraProviderManager.h"
 
 #include <device3/Camera3StreamInterface.h>
+#include <utils/Utils.h>
 
 namespace android {
 
@@ -48,7 +49,7 @@
         case HalCameraDeviceStatus::ENUMERATING:
             return CameraDeviceStatus::ENUMERATING;
     }
-    ALOGW("Unexpectedcamera device status code %d", s);
+    ALOGW("Unexpected camera device status code %d", eToI(s));
     return CameraDeviceStatus::NOT_PRESENT;
 }
 
@@ -74,7 +75,7 @@
         case HalTorchModeStatus::AVAILABLE_ON:
             return TorchModeStatus::AVAILABLE_ON;
     }
-    ALOGW("Unexpectedcamera torch mode status code %d", s);
+    ALOGW("Unexpected camera torch mode status code %d", eToI(s));
     return TorchModeStatus::NOT_AVAILABLE;
 }
 
@@ -88,7 +89,7 @@
         case HalCameraDeviceStatus::ENUMERATING:
             return "ENUMERATING";
     }
-    ALOGW("Unexpected HAL device status code %d", s);
+    ALOGW("Unexpected HAL device status code %d", eToI(s));
     return "UNKNOWN_STATUS";
 }
 
diff --git a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
index 1e546fb..40800d9 100644
--- a/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
+++ b/services/camera/libcameraservice/common/hidl/HidlProviderInfo.cpp
@@ -26,6 +26,7 @@
 #include "device3/ZoomRatioMapper.h"
 #include <utils/SessionConfigurationUtilsHidl.h>
 #include <utils/Trace.h>
+#include <utils/Utils.h>
 
 #include <android/hardware/camera/device/3.7/ICameraDevice.h>
 
@@ -69,7 +70,7 @@
         case Status::INTERNAL_ERROR:
             return INVALID_OPERATION;
     }
-    ALOGW("Unexpected HAL status code %d", s);
+    ALOGW("Unexpected HAL status code %d", eToI(s));
     return INVALID_OPERATION;
 }
 
@@ -111,7 +112,7 @@
         case Status::INTERNAL_ERROR:
             return "INTERNAL_ERROR";
     }
-    ALOGW("Unexpected HAL status code %d", s);
+    ALOGW("Unexpected HAL status code %d", eToI(s));
     return "UNKNOWN_ERROR";
 }
 
@@ -591,7 +592,7 @@
     }
     if (status != Status::OK) {
         ALOGE("%s: Unable to get camera characteristics for device %s: %s (%d)",
-                __FUNCTION__, id.c_str(), statusToString(status), status);
+                __FUNCTION__, id.c_str(), statusToString(status), eToI(status));
         return;
     }
 
@@ -770,7 +771,7 @@
             if (status != Status::OK) {
                 ALOGE("%s: Unable to get physical camera %s characteristics for device %s: %s (%d)",
                         __FUNCTION__, id.c_str(), mId.c_str(),
-                        statusToString(status), status);
+                        statusToString(status), eToI(status));
                 return;
             }
 
@@ -928,7 +929,7 @@
                 res = INVALID_OPERATION;
                 break;
             default:
-                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, callStatus);
+                ALOGE("%s: Session configuration query failed: %d", __FUNCTION__, eToI(callStatus));
                 res = UNKNOWN_ERROR;
         }
     } else {
@@ -1076,7 +1077,7 @@
                         break;
                     default:
                         ALOGE("%s: Session configuration query failed: %d", __FUNCTION__,
-                                  callStatus);
+                                eToI(callStatus));
                         res = UNKNOWN_ERROR;
                 }
             } else {
diff --git a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
index 9dacaf6..09299e6 100644
--- a/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
+++ b/services/camera/libcameraservice/device3/hidl/HidlCamera3Device.cpp
@@ -56,6 +56,7 @@
 #include "device3/hidl/HidlCamera3OfflineSession.h"
 #include "utils/SessionConfigurationUtilsHidl.h"
 #include "utils/TraceHFR.h"
+#include "utils/Utils.h"
 
 #include "../../common/hidl/HidlProviderInfo.h"
 #include "HidlCamera3Device.h"
@@ -881,7 +882,7 @@
                     ret = true;
                     break;
                 default:
-                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, callStatus);
+                    ALOGV("%s: Reconfiguration query failed: %d", __FUNCTION__, eToI(callStatus));
                     ret = true;
             }
         } else {
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index d3b2a51..59e892f 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -26,6 +26,7 @@
 #include <hidl/HidlTransportSupport.h>
 
 #include <camera/CameraUtils.h>
+#include <utils/AttributionAndPermissionUtils.h>
 #include <utils/Utils.h>
 
 namespace android {
@@ -68,10 +69,15 @@
                                             getCameraCharacteristics_cb _hidl_cb) {
     android::CameraMetadata cameraMetadata;
     HStatus status = HStatus::NO_ERROR;
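+    // Attribute the call to the Binder caller on the default device; this replaces
+    // the kDefaultDeviceId argument that was previously passed directly.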
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
     binder::Status serviceRet =
         mAidlICameraService->getCameraCharacteristics(cameraId,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, 0, &cameraMetadata);
+                clientAttribution, 0, &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -119,11 +125,17 @@
         return Void();
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
     binder::Status serviceRet = mAidlICameraService->connectDevice(
-            callbacks, cameraId, std::string(), {},
-            hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
+            callbacks, cameraId, 0/*oomScoreOffset*/,
             /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
-            kDefaultDeviceId, /*devicePolicy*/0, /*out*/&deviceRemote);
+            clientAttribution, /*devicePolicy*/0, /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index c710671..53234f0 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -35,6 +35,7 @@
         "libmedia_headers",
     ],
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "libbinder",
         "libbase",
         "libutils",
@@ -85,7 +86,7 @@
         "camera_service_fuzzer.cpp",
     ],
     defaults: [
-        "camera_service_fuzzer_defaults"
+        "camera_service_fuzzer_defaults",
     ],
 }
 
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index bce0faf..ac2fd64 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -23,6 +23,7 @@
 
 #include <CameraService.h>
 #include <device3/Camera3StreamInterface.h>
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/ICameraServiceListener.h>
@@ -219,7 +220,9 @@
     } else {
         camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
     }
-    mCameraService->getNumberOfCameras(camType, kDefaultDeviceId, /*devicePolicy*/0, &mNumCameras);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    mCameraService->getNumberOfCameras(camType, clientAttribution, /*devicePolicy*/0, &mNumCameras);
 }
 
 void CameraFuzzer::getCameraInformation(int32_t cameraId) {
@@ -238,14 +241,17 @@
     hardware::camera2::params::VendorTagDescriptorCache cache;
     mCameraService->getCameraVendorTagCache(&cache);
 
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+
     CameraInfo cameraInfo;
-    mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, kDefaultDeviceId,
+    mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, clientAttribution,
             /*devicePolicy*/0, &cameraInfo);
 
     CameraMetadata metadata;
     mCameraService->getCameraCharacteristics(cameraIdStr,
             /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
-            kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+            clientAttribution, /*devicePolicy*/0, &metadata);
 }
 
 void CameraFuzzer::invokeCameraSound() {
@@ -327,13 +333,15 @@
     std::string cameraIdStr = std::to_string(cameraId);
     sp<IBinder> binder = new BBinder;
 
-    mCameraService->setTorchMode(cameraIdStr, true, binder, kDefaultDeviceId, /*devicePolicy*/0);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    mCameraService->setTorchMode(cameraIdStr, true, binder, clientAttribution, /*devicePolicy*/0);
     ALOGV("Turned torch on.");
     int32_t torchStrength = rand() % 5 + 1;
     ALOGV("Changing torch strength level to %d", torchStrength);
     mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder,
-            kDefaultDeviceId, /*devicePolicy*/0);
-    mCameraService->setTorchMode(cameraIdStr, false, binder, kDefaultDeviceId, /*devicePolicy*/0);
+            clientAttribution, /*devicePolicy*/0);
+    mCameraService->setTorchMode(cameraIdStr, false, binder, clientAttribution, /*devicePolicy*/0);
     ALOGV("Turned torch off.");
 }
 
@@ -349,13 +357,15 @@
     ::android::binder::Status rc;
     sp<ICamera> cameraDevice;
 
-    rc = mCameraService->connect(this, cameraId, std::string(),
-                                 android::CameraService::USE_CALLING_UID,
-                                 android::CameraService::USE_CALLING_PID,
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+    clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+    rc = mCameraService->connect(this, cameraId,
                                  /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                                  ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
                                  /*forceSlowJpegMode*/false,
-                                 kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
+                                 clientAttribution, /*devicePolicy*/0, &cameraDevice);
     if (!rc.isOk()) {
         // camera not connected
         return;
@@ -590,11 +600,15 @@
     for (auto s : statuses) {
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        mCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
+        clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+        clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+        mCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
-                kDefaultDeviceId, /*devicePolicy*/0, &device);
+                clientAttribution, /*devicePolicy*/0, &device);
         if (device == nullptr) {
             continue;
         }
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 55e2c9d..8bb5ffa 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -30,7 +30,7 @@
         "ExifUtilsTest.cpp",
         "NV12Compressor.cpp",
         "RotateAndCropMapperTest.cpp",
-	"SessionStatsBuilderTest.cpp",
+        "SessionStatsBuilderTest.cpp",
         "ZoomRatioTest.cpp",
     ],
 
@@ -76,7 +76,7 @@
 
     defaults: [
         "libcameraservice_deps",
-	"cameraservice_test_hostsupported"
+        "cameraservice_test_hostsupported",
     ],
 
     // Only include libs that can't be run host-side here
@@ -118,13 +118,13 @@
     name: "cameraservice_test_host",
 
     defaults: [
-        "cameraservice_test_hostsupported"
+        "cameraservice_test_hostsupported",
     ],
 
     include_dirs: [
         "frameworks/av/camera/include",
         "frameworks/av/camera/include/camera",
-        "frameworks/native/libs/binder/include_activitymanager"
+        "frameworks/native/libs/binder/include_activitymanager",
     ],
 
     // Only include libs that can't be run device-side here
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index cf86a05..50aeaca 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <android/hardware/BnCameraServiceProxy.h>
 #include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
@@ -146,6 +147,11 @@
         return mCameraServiceProxy->notifyCameraState(cameraSessionStats);
     }
 
+    virtual binder::Status notifyFeatureCombinationStats(
+            const hardware::CameraFeatureCombinationStats& featureCombStats) override {
+        return mCameraServiceProxy->notifyFeatureCombinationStats(featureCombStats);
+    }
+
     virtual binder::Status isCameraDisabled(int userId, bool *ret) override {
         if (mOverrideCameraDisabled) {
             *ret = mCameraDisabled;
@@ -218,6 +224,11 @@
 // Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
 // policy, and succeed when it isn't.
 TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+    clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -228,11 +239,10 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &device);
+                clientAttribution, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
         ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
@@ -244,11 +254,10 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &device);
+                clientAttribution, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(status.isOk());
     }
@@ -256,6 +265,10 @@
 
 // Test that consecutive camera connections succeed.
 TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -265,20 +278,18 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
+                clientAttribution, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
+                clientAttribution, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
@@ -288,6 +299,10 @@
 // Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
 // in the second call.
 TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -297,20 +312,18 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
+                clientAttribution, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                1/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
+                clientAttribution, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index 4f238ab..4daab0f 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -41,6 +41,20 @@
         mCameraService = cameraService;
     }
 
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+        AttributionSourceState attributionSource{};
+        attributionSource.pid = callingPid;
+        attributionSource.uid = callingUid;
+        return attributionSource;
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+            int32_t deviceId) {
+        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
+        attributionSource.deviceId = deviceId;
+        return attributionSource;
+    }
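+
+    // Illustrative usage: callers acting on behalf of the Binder caller pass the
+    // USE_CALLING_* sentinels, e.g.
+    //   buildAttributionSource(hardware::ICameraService::USE_CALLING_PID,
+    //           hardware::ICameraService::USE_CALLING_UID, kDefaultDeviceId);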
+
     // Utilities handling Binder calling identities (previously in CameraThreadState)
     virtual int getCallingUid();
     virtual int getCallingPid();
@@ -123,17 +137,13 @@
             : mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
 
     static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
-        AttributionSourceState attributionSource{};
-        attributionSource.pid = callingPid;
-        attributionSource.uid = callingUid;
-        return attributionSource;
+        return AttributionAndPermissionUtils::buildAttributionSource(callingPid, callingUid);
     }
 
     static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
             int32_t deviceId) {
-        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
-        attributionSource.deviceId = deviceId;
-        return attributionSource;
+        return AttributionAndPermissionUtils::buildAttributionSource(
+                callingPid, callingUid, deviceId);
     }
 
     static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
index 4afae9b..d5e3790 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.cpp
@@ -18,19 +18,25 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <gui/Surface.h>
 #include <inttypes.h>
 #include <utils/Log.h>
 #include <utils/String16.h>
 #include <camera/StringUtils.h>
 #include <binder/IServiceManager.h>
+#include <system/window.h>
+
+#include "aidl/android/hardware/graphics/common/Dataspace.h"
 
 #include "CameraServiceProxyWrapper.h"
 
 namespace android {
 
 using hardware::CameraExtensionSessionStats;
+using hardware::CameraFeatureCombinationStats;
 using hardware::CameraSessionStats;
 using hardware::ICameraServiceProxy;
+using hardware::camera2::params::SessionConfiguration;
 
 namespace {
 // Sentinel value to be returned when extension session with a stale or invalid key is reported.
@@ -215,6 +221,105 @@
     proxyBinder->pingForUserUpdate();
 }
 
+int64_t CameraServiceProxyWrapper::encodeSessionConfiguration(
+        const SessionConfiguration& sessionConfig) {
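+    // Fold the metrics-relevant parts of the session configuration (target FPS
+    // range, stabilization mode, output format/dataspace, dynamic range profile
+    // and 4K resolution) into a CAMERA_FEATURE_* bitmask.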
+    int64_t features = CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+    const static int32_t WIDTH_4K = 3840;
+    const static int32_t HEIGHT_4K = 2160;
+
+    // Check session parameters
+    if (sessionConfig.hasSessionParameters()) {
+        const auto& parameters = sessionConfig.getSessionParameters();
+
+        camera_metadata_ro_entry fpsEntry = parameters.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE);
+        if (fpsEntry.count == 2 && fpsEntry.data.i32[1] == 60) {
+            features |= CameraFeatureCombinationStats::CAMERA_FEATURE_60_FPS;
+        }
+
+        camera_metadata_ro_entry stabEntry =
+                parameters.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE);
+        if (stabEntry.count == 1 && stabEntry.data.u8[0] ==
+                ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_PREVIEW_STABILIZATION) {
+            features |= CameraFeatureCombinationStats::CAMERA_FEATURE_STABILIZATION;
+        }
+    }
+
+    // Check output configurations
+    const auto& outputConfigs = sessionConfig.getOutputConfigurations();
+    for (const auto& config : outputConfigs) {
+        int format = config.getFormat();
+        int dataSpace = config.getDataspace();
+        int64_t dynamicRangeProfile = config.getDynamicRangeProfile();
+
+        // Check JPEG and JPEG_R features
+        if (format == HAL_PIXEL_FORMAT_BLOB) {
+            if (dataSpace == HAL_DATASPACE_V0_JFIF) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_JPEG;
+            } else if (dataSpace == static_cast<android_dataspace_t>(
+                    aidl::android::hardware::graphics::common::Dataspace::JPEG_R)) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_JPEG_R;
+            }
+        } else {
+            if (dynamicRangeProfile == HAL_DATASPACE_BT2020_HLG) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_HLG10;
+            }
+
+            // Check 4K
+            const auto& gbps = config.getGraphicBufferProducers();
+            int32_t width = 0, height = 0;
+            if (gbps.size() > 0) {
+                sp<Surface> surface = new Surface(gbps[0], /*useAsync*/false);
+                ANativeWindow *anw = surface.get();
+
+                width = ANativeWindow_getWidth(anw);
+                if (width < 0) {
+                    ALOGE("%s: Failed to query Surface width: %s (%d)",
+                            __FUNCTION__, strerror(-width), width);
+                    return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+                }
+                height = ANativeWindow_getHeight(anw);
+                if (height < 0) {
+                    ALOGE("%s: Failed to query Surface height: %s (%d)",
+                            __FUNCTION__, strerror(-height), height);
+                    return CameraFeatureCombinationStats::CAMERA_FEATURE_UNKNOWN;
+                }
+            } else {
+                width = config.getWidth();
+                height = config.getHeight();
+            }
+            if (width == WIDTH_4K && height == HEIGHT_4K) {
+                features |= CameraFeatureCombinationStats::CAMERA_FEATURE_4K;
+            }
+        }
+    }
+    return features;
+}
+
+// Note: The `ret` parameter is the return value of the
+// `isSessionConfigurationWithParametersSupported` binder call from the app.
+void CameraServiceProxyWrapper::logFeatureCombinationInternal(
+        const std::string &cameraId, int clientUid,
+        const SessionConfiguration& sessionConfiguration, binder::Status ret,
+        int type) {
+    sp<hardware::ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
+    if (proxyBinder == nullptr) return;
+
+    int64_t featureCombination = encodeSessionConfiguration(sessionConfiguration);
+    int queryStatus = ret.isOk() ? OK : ret.serviceSpecificErrorCode();
+    CameraFeatureCombinationStats stats;
+    stats.mCameraId = cameraId;
+    stats.mUid = clientUid;
+    stats.mFeatureCombination = featureCombination;
+    stats.mQueryType = type;
+    stats.mStatus = queryStatus;
+
+    auto status = proxyBinder->notifyFeatureCombinationStats(stats);
+    if (!status.isOk()) {
+        ALOGE("%s: Failed to notify feature combination stats: %s", __FUNCTION__,
+                status.exceptionMessage().c_str());
+    }
+}
+
 int CameraServiceProxyWrapper::getRotateAndCropOverride(const std::string &packageName,
         int lensFacing, int userId) {
     sp<ICameraServiceProxy> proxyBinder = getCameraServiceProxy();
diff --git a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
index b6a967f..ad8b1cd 100644
--- a/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
+++ b/services/camera/libcameraservice/utils/CameraServiceProxyWrapper.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
 #define ANDROID_SERVERS_CAMERA_SERVICE_PROXY_WRAPPER_H_
 
+#include <android/hardware/CameraFeatureCombinationStats.h>
 #include <android/hardware/ICameraServiceProxy.h>
 
 #include <utils/Mutex.h>
@@ -26,7 +27,7 @@
 #include <string>
 
 #include <camera/CameraSessionStats.h>
-
+#include <camera/camera2/SessionConfiguration.h>
+
 namespace android {
 
 class CameraServiceProxyWrapper {
@@ -86,6 +87,11 @@
     // ID generated for the open event associated with them.
     static int64_t generateLogId(std::random_device& randomDevice);
 
+    static int64_t encodeSessionConfiguration(const SessionConfiguration& sessionConfig);
+
+    void logFeatureCombinationInternal(const std::string &cameraId, int clientUid,
+            const hardware::camera2::params::SessionConfiguration& sessionConfiguration,
+            binder::Status ret, int type);
 public:
     CameraServiceProxyWrapper(sp<hardware::ICameraServiceProxy> serviceProxy = nullptr) :
             mCameraServiceProxy(serviceProxy)
@@ -115,6 +121,20 @@
             bool usedZoomOverride, std::pair<int32_t, int32_t> mostRequestedFpsRange,
             const std::vector<hardware::CameraStreamStats>& streamStats);
 
+    // Feature combination query
+    void logFeatureCombinationQuery(const std::string &id, int clientUid,
+            const hardware::camera2::params::SessionConfiguration& sessionConfiguration,
+            binder::Status ret) {
+        logFeatureCombinationInternal(id, clientUid, sessionConfiguration, ret,
+                (int)hardware::CameraFeatureCombinationStats::QueryType::QUERY_FEATURE_COMBINATION);
+    }
+    void logSessionCharacteristicsQuery(const std::string &id, int clientUid,
+            const hardware::camera2::params::SessionConfiguration& sessionConfiguration,
+            binder::Status ret) {
+        logFeatureCombinationInternal(id, clientUid, sessionConfiguration, ret, (int)
+                hardware::CameraFeatureCombinationStats::QueryType::QUERY_SESSION_CHARACTERISTICS);
+    }
+
     // Ping camera service proxy for user update
     void pingCameraServiceProxy();
 
diff --git a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
index cfa1815..9986a84 100644
--- a/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
+++ b/services/camera/libcameraservice/utils/SessionConfigurationUtilsHidl.cpp
@@ -23,6 +23,7 @@
 #include "device3/aidl/AidlCamera3Device.h"
 #include "device3/hidl/HidlCamera3Device.h"
 #include "device3/Camera3OutputStream.h"
+#include "utils/Utils.h"
 
 using android::camera3::OutputStreamInfo;
 using android::hardware::camera2::ICameraDeviceUser;
@@ -48,16 +49,16 @@
     hidl.streams.resize(aidl.streams.size());
     size_t i = 0;
     for (const auto &stream : aidl.streams) {
-        if (static_cast<int>(stream.dynamicRangeProfile) !=
+        if (eToI(stream.dynamicRangeProfile) !=
                 ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD) {
             ALOGE("%s Dynamic range profile %" PRId64 " not supported by HIDL", __FUNCTION__,
-                    stream.dynamicRangeProfile);
+                    eToI(stream.dynamicRangeProfile));
             return BAD_VALUE;
         }
 
-        if (static_cast<int>(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
+        if (eToI(stream.useCase) != ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES_DEFAULT) {
             ALOGE("%s Stream use case %" PRId64 "not supported by HIDL", __FUNCTION__,
-                    stream.useCase);
+                    eToI(stream.useCase));
             return BAD_VALUE;
         }
 
diff --git a/services/camera/libcameraservice/utils/Utils.h b/services/camera/libcameraservice/utils/Utils.h
index 4e90871..0eb5e2c 100644
--- a/services/camera/libcameraservice/utils/Utils.h
+++ b/services/camera/libcameraservice/utils/Utils.h
@@ -19,10 +19,21 @@
 
 #include <sched.h>
 #include <unistd.h>
+#include <type_traits>
 
 namespace android {
 
 /**
+ * Magically convert an enum to its underlying integer type, mostly so it can be
+ * printed with printf-style formatters without warnings.
+ * Backport of C++23 std::to_underlying()
+ */
+template<typename Enum>
+constexpr std::underlying_type_t<Enum> eToI(Enum val) {
+    return static_cast<std::underlying_type_t<Enum>>(val);
+}
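+
+// Illustrative usage:
+//   ALOGW("Unexpected HAL status code %d", eToI(status));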
+
+/**
  * As of Android V, ro.board.api_level returns the year and month of release (ex. 202404)
  * instead of release SDK version. This function maps year/month format back to release
  * SDK version.
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index 3557791..633acec 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -750,7 +750,7 @@
     return status;
   }
 
-  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_READ_RARELY,
+  PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                              fence);
   if (planesLock.getStatus() != OK) {
     ALOGE("Failed to lock hwBuffer planes");
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.cc b/services/camera/virtualcamera/util/EglDisplayContext.cc
index 54307b4..ccd0d71 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.cc
+++ b/services/camera/virtualcamera/util/EglDisplayContext.cc
@@ -98,16 +98,16 @@
 }
 
 EglDisplayContext::~EglDisplayContext() {
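+  // Unbind the context and surface from this thread before destroying them and
+  // terminating the display.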
+  eglMakeCurrent(mEglDisplay, EGL_NO_SURFACE, EGL_NO_SURFACE, EGL_NO_CONTEXT);
   if (mEglSurface != EGL_NO_SURFACE) {
     eglDestroySurface(mEglDisplay, mEglSurface);
   }
-  if (mEglDisplay != EGL_NO_DISPLAY) {
-    eglTerminate(mEglDisplay);
-  }
   if (mEglContext != EGL_NO_CONTEXT) {
     eglDestroyContext(mEglDisplay, mEglContext);
   }
-  eglReleaseThread();
+  if (mEglDisplay != EGL_NO_DISPLAY) {
+    eglTerminate(mEglDisplay);
+  }
 }
 
 EGLDisplay EglDisplayContext::getEglDisplay() const {
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 47b8583..eda4169 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -230,6 +230,15 @@
                         kTextureCoords.size(), kTextureCoords.data());
 }
 
+EglTestPatternProgram::~EglTestPatternProgram() {
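+  // Disable the vertex attribute arrays used by this program's position and
+  // texture coordinate handles.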
+  if (mPositionHandle != -1) {
+    glDisableVertexAttribArray(mPositionHandle);
+  }
+  if (mTextureCoordHandle != -1) {
+    glDisableVertexAttribArray(mTextureCoordHandle);
+  }
+}
+
 bool EglTestPatternProgram::draw(const std::chrono::nanoseconds timestamp) {
   // Load compiled shader.
   glUseProgram(mProgram);
diff --git a/services/camera/virtualcamera/util/EglProgram.h b/services/camera/virtualcamera/util/EglProgram.h
index a88308d..cf93157 100644
--- a/services/camera/virtualcamera/util/EglProgram.h
+++ b/services/camera/virtualcamera/util/EglProgram.h
@@ -46,6 +46,7 @@
 class EglTestPatternProgram : public EglProgram {
  public:
   EglTestPatternProgram();
+  virtual ~EglTestPatternProgram();
 
   bool draw(std::chrono::nanoseconds timestamp);
 
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index c81d36d..465531b 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -31,6 +31,12 @@
 namespace android {
 namespace companion {
 namespace virtualcamera {
+namespace {
+
+// Maximum number of buffers the producer can dequeue without blocking.
+constexpr int kBufferProducerMaxDequeueBufferCount = 64;
+
+}  // namespace
 
 EglSurfaceTexture::EglSurfaceTexture(const uint32_t width, const uint32_t height)
     : mWidth(width), mHeight(height) {
@@ -40,6 +46,10 @@
     return;
   }
   BufferQueue::createBufferQueue(&mBufferProducer, &mBufferConsumer);
+  // Set the producer's max dequeued buffer count to the maximum value to
+  // prevent blocking when dequeuing input buffers.
+  mBufferProducer->setMaxDequeuedBufferCount(
+      kBufferProducerMaxDequeueBufferCount);
   mGlConsumer = sp<GLConsumer>::make(
       mBufferConsumer, mTextureId, GLConsumer::TEXTURE_EXTERNAL, false, false);
   mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
@@ -75,7 +85,26 @@
 }
 
 GLuint EglSurfaceTexture::updateTexture() {
-  mGlConsumer->updateTexImage();
+  int previousFrameId;
+  int framesAdvance = 0;
+  // Consume buffers one at a time.
+  // Contrary to the code comments in GLConsumer, the GLConsumer acquires the
+  // next queued buffer (not the most recently queued buffer).
+  while (true) {
+    previousFrameId = mGlConsumer->getFrameNumber();
+    mGlConsumer->updateTexImage();
+    int currentFrameId = mGlConsumer->getFrameNumber();
+    if (previousFrameId == currentFrameId) {
+      // The frame number didn't change after updating the texture, which
+      // means we're at the end of the queue and the currently attached
+      // buffer is the most recent one.
+      break;
+    }
+
+    framesAdvance++;
+    previousFrameId = currentFrameId;
+  }
+  ALOGV("%s: Advanced %d frames", __func__, framesAdvance);
   return mTextureId;
 }
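
The updateTexture() change above drains the consumer by calling updateTexImage()
repeatedly until getFrameNumber() stops changing, i.e. until the attached buffer
is the newest one queued. A hedged, generic sketch of the same drain-to-latest
pattern (the template and names are illustrative, not the GLConsumer API):

    #include <cstdint>

    // Illustrative: Consumer is any type with getFrameNumber() and
    // updateTexImage() that advances one queued buffer per call.
    template <typename Consumer>
    uint64_t drainToLatest(Consumer& consumer) {
      uint64_t framesAdvanced = 0;
      while (true) {
        const uint64_t before = consumer.getFrameNumber();
        consumer.updateTexImage();            // acquires the next queued buffer
        if (consumer.getFrameNumber() == before) {
          break;                              // nothing newer was queued
        }
        ++framesAdvanced;
      }
      return framesAdvanced;
    }
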
 
diff --git a/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp b/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
index d329e54..2b31de7 100644
--- a/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
+++ b/services/mediaextractor/fuzzers/MediaExtractorServiceFuzzer.cpp
@@ -22,6 +22,7 @@
 using ::android::MediaExtractorService;
 
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
+    signal(SIGPIPE, SIG_IGN);
     auto service = sp<MediaExtractorService>::make();
     fuzzService(service, FuzzedDataProvider(data, size));
     return 0;
diff --git a/services/mediametrics/AudioPowerUsage.cpp b/services/mediametrics/AudioPowerUsage.cpp
index 7dc445b..201d740 100644
--- a/services/mediametrics/AudioPowerUsage.cpp
+++ b/services/mediametrics/AudioPowerUsage.cpp
@@ -93,18 +93,27 @@
 /* static */
 bool AudioPowerUsage::deviceFromString(const std::string& device_string, int32_t& device) {
     static std::map<std::string, int32_t> deviceTable = {
-        { "AUDIO_DEVICE_OUT_EARPIECE",             OUTPUT_EARPIECE },
-        { "AUDIO_DEVICE_OUT_SPEAKER_SAFE",         OUTPUT_SPEAKER_SAFE },
-        { "AUDIO_DEVICE_OUT_SPEAKER",              OUTPUT_SPEAKER },
-        { "AUDIO_DEVICE_OUT_WIRED_HEADSET",        OUTPUT_WIRED_HEADSET },
-        { "AUDIO_DEVICE_OUT_WIRED_HEADPHONE",      OUTPUT_WIRED_HEADSET },
-        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO",        OUTPUT_BLUETOOTH_SCO },
-        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP",       OUTPUT_BLUETOOTH_A2DP },
-        { "AUDIO_DEVICE_OUT_USB_HEADSET",          OUTPUT_USB_HEADSET },
-        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET", OUTPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_OUT_EARPIECE",                  OUTPUT_EARPIECE },
+        { "AUDIO_DEVICE_OUT_SPEAKER_SAFE",              OUTPUT_SPEAKER_SAFE },
+        { "AUDIO_DEVICE_OUT_SPEAKER",                   OUTPUT_SPEAKER },
+        { "AUDIO_DEVICE_OUT_WIRED_HEADSET",             OUTPUT_WIRED_HEADSET },
+        { "AUDIO_DEVICE_OUT_WIRED_HEADPHONE",           OUTPUT_WIRED_HEADSET },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO",             OUTPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_SCO_HEADSET",     OUTPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP",            OUTPUT_BLUETOOTH_A2DP },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_HEADPHONES", OUTPUT_BLUETOOTH_A2DP },
+        { "AUDIO_DEVICE_OUT_BLUETOOTH_A2DP_SPEAKER",    OUTPUT_BLUETOOTH_A2DP },
+        { "AUDIO_DEVICE_OUT_BLE_HEADSET",               OUTPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_OUT_BLE_SPEAKER",               OUTPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_OUT_BLE_BROADCAST",             OUTPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_OUT_USB_HEADSET",               OUTPUT_USB_HEADSET },
+        { "AUDIO_DEVICE_OUT_DGTL_DOCK_HEADSET",         OUTPUT_DOCK },
+        { "AUDIO_DEVICE_OUT_HDMI",                      OUTPUT_HDMI },
 
         { "AUDIO_DEVICE_IN_BUILTIN_MIC",           INPUT_BUILTIN_MIC },
         { "AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET", INPUT_BLUETOOTH_SCO },
+        { "AUDIO_DEVICE_IN_BLUETOOTH_BLE",         INPUT_BLUETOOTH_BLE },
+        { "AUDIO_DEVICE_IN_BLE_HEADSET",           INPUT_BLUETOOTH_BLE },
         { "AUDIO_DEVICE_IN_WIRED_HEADSET",         INPUT_WIRED_HEADSET_MIC },
         { "AUDIO_DEVICE_IN_USB_DEVICE",            INPUT_USB_HEADSET_MIC },
         { "AUDIO_DEVICE_IN_BACK_MIC",              INPUT_BUILTIN_BACK_MIC },
diff --git a/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
index 6e5a5cf..cf09113 100644
--- a/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
+++ b/services/mediametrics/include/mediametricsservice/AudioPowerUsage.h
@@ -71,6 +71,9 @@
         OUTPUT_BLUETOOTH_SCO    = 0x10,
         OUTPUT_BLUETOOTH_A2DP   = 0x20,
         OUTPUT_SPEAKER_SAFE     = 0x40,
+        OUTPUT_BLUETOOTH_BLE    = 0x80,
+        OUTPUT_DOCK             = 0x100,
+        OUTPUT_HDMI             = 0x200,
 
         INPUT_DEVICE_BIT        = 0x40000000,
         INPUT_BUILTIN_MIC       = INPUT_DEVICE_BIT | 0x1, // non-negative positive int32.
@@ -78,6 +81,7 @@
         INPUT_WIRED_HEADSET_MIC = INPUT_DEVICE_BIT | 0x4,
         INPUT_USB_HEADSET_MIC   = INPUT_DEVICE_BIT | 0x8,
         INPUT_BLUETOOTH_SCO     = INPUT_DEVICE_BIT | 0x10,
+        INPUT_BLUETOOTH_BLE     = INPUT_DEVICE_BIT | 0x20,
     };
 
     static bool typeFromString(const std::string& type_string, int32_t& type);
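
In the enum above, input devices set INPUT_DEVICE_BIT (0x40000000) on top of a
small device index, so every value stays a non-negative int32 and input values
cannot collide with the output flags. A small illustrative check (the constant
and function names are stand-ins, not part of this header):

    #include <cstdint>

    constexpr int32_t kInputDeviceBit = 0x40000000;

    constexpr bool isInputDevice(int32_t device) {
      return (device & kInputDeviceBit) != 0;
    }

    static_assert(isInputDevice(kInputDeviceBit | 0x20));  // e.g. INPUT_BLUETOOTH_BLE
    static_assert(!isInputDevice(0x80));                   // e.g. OUTPUT_BLUETOOTH_BLE
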
diff --git a/services/mediaresourcemanager/test/Android.bp b/services/mediaresourcemanager/test/Android.bp
index 5dfec30..ac41959 100644
--- a/services/mediaresourcemanager/test/Android.bp
+++ b/services/mediaresourcemanager/test/Android.bp
@@ -30,7 +30,7 @@
         "server_configurable_flags",
     ],
     defaults: [
-        "aconfig_lib_cc_static_link.defaults",
+        "aconfig_lib_cc_shared_link.defaults",
     ],
     include_dirs: [
         "frameworks/av/include",
@@ -84,6 +84,9 @@
         "libactivitymanager_aidl",
         "server_configurable_flags",
     ],
+    defaults: [
+        "aconfig_lib_cc_shared_link.defaults",
+    ],
     include_dirs: [
         "frameworks/av/include",
         "frameworks/av/services/mediaresourcemanager",
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.cpp b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
index 5b4fca9..d663f37 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.cpp
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.cpp
@@ -205,6 +205,8 @@
           "sample_rate=%u, channel_mask=%#x, device=%d",
           __func__, config->format, config->sample_rate,
           config->channel_mask, deviceId);
+
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     const status_t status = MmapStreamInterface::openMmapStream(streamDirection,
                                                                 &attributes,
                                                                 config,
@@ -246,7 +248,7 @@
           __func__, config->format, getDeviceId(), getSessionId());
 
     // Create MMAP/NOIRQ buffer.
-    result = createMmapBuffer();
+    result = createMmapBuffer_l();
     if (result != AAUDIO_OK) {
         goto error;
     }
@@ -283,7 +285,7 @@
     return result;
 
 error:
-    close();
+    close_l();
     // restore original requests
     setDeviceId(mRequestedDeviceId);
     setSessionId(requestedSessionId);
@@ -291,13 +293,28 @@
 }
 
 void AAudioServiceEndpointMMAP::close() {
-    if (mMmapStream != nullptr) {
-        // Needs to be explicitly cleared or CTS will fail but it is not clear why.
-        mMmapStream.clear();
+    bool closedIt = false;
+    {
+        const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+        closedIt = close_l();
+    }
+    if (closedIt) {
+        // TODO Why is this needed?
         AudioClock::sleepForNanos(100 * AAUDIO_NANOS_PER_MILLISECOND);
     }
 }
 
+bool AAudioServiceEndpointMMAP::close_l() { // requires mMmapStreamLock
+    bool closedIt = false;
+    if (mMmapStream != nullptr) {
+        // Needs to be explicitly cleared or CTS will fail but it is not clear why.
+        ALOGD("%s() clear mMmapStream", __func__);
+        mMmapStream.clear();
+        closedIt = true;
+    }
+    return closedIt;
+}
+
 aaudio_result_t AAudioServiceEndpointMMAP::startStream(sp<AAudioServiceStreamBase> stream,
                                                    audio_port_handle_t *clientHandle __unused) {
     // Start the client on behalf of the AAudio service.
@@ -318,7 +335,7 @@
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::stopStream(sp<AAudioServiceStreamBase> /*stream*/,
-                                                      audio_port_handle_t /*clientHandle*/) {
+                                                      audio_port_handle_t clientHandle) {
     mFramesTransferred.reset32();
 
     // Round 64-bit counter up to a multiple of the buffer capacity.
@@ -328,36 +345,68 @@
     mFramesTransferred.roundUp64(getBufferCapacity());
 
     // Use the port handle that was provided by openMmapStream().
-    ALOGV("%s() mPortHandle = %d", __func__, mPortHandle);
-    return stopClient(mPortHandle);
+    aaudio_result_t result = stopClient(mPortHandle);
+    ALOGD("%s(%d): called stopClient(%d=mPortHandle), returning %d", __func__,
+          (int)clientHandle, mPortHandle, result);
+    return result;
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::startClient(const android::AudioClient& client,
                                                        const audio_attributes_t *attr,
-                                                       audio_port_handle_t *clientHandle) {
-    return mMmapStream == nullptr
-            ? AAUDIO_ERROR_NULL
-            : AAudioConvert_androidToAAudioResult(mMmapStream->start(client, attr, clientHandle));
+                                                       audio_port_handle_t *portHandlePtr) {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    } else if (!isConnected()) {
+        ALOGD("%s(): MMAP stream was disconnected", __func__);
+        return AAUDIO_ERROR_DISCONNECTED;
+    } else {
+        aaudio_result_t result = AAudioConvert_androidToAAudioResult(
+                mMmapStream->start(client, attr, portHandlePtr));
+        if (!isConnected() && (portHandlePtr != nullptr)) {
+            ALOGD("%s(): MMAP stream DISCONNECTED after starting port %d, will stop it",
+                  __func__, *portHandlePtr);
+            mMmapStream->stop(*portHandlePtr);
+            *portHandlePtr = AUDIO_PORT_HANDLE_NONE;
+            result = AAUDIO_ERROR_DISCONNECTED;
+        }
+        ALOGD("%s(): returning port %d, result %d", __func__,
+              (portHandlePtr == nullptr) ? -1 : *portHandlePtr, result);
+        return result;
+    }
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::stopClient(audio_port_handle_t clientHandle) {
-    return mMmapStream == nullptr
-            ? AAUDIO_ERROR_NULL
-            : AAudioConvert_androidToAAudioResult(mMmapStream->stop(clientHandle));
+aaudio_result_t AAudioServiceEndpointMMAP::stopClient(audio_port_handle_t portHandle) {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+    if (mMmapStream == nullptr) {
+        ALOGE("%s(%d): called after mMmapStream set to NULL", __func__, (int)portHandle);
+        return AAUDIO_ERROR_NULL;
+    } else {
+        aaudio_result_t result = AAudioConvert_androidToAAudioResult(
+                mMmapStream->stop(portHandle));
+        ALOGD("%s(%d): returning %d", __func__, (int)portHandle, result);
+        return result;
+    }
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::standby() {
-    return mMmapStream == nullptr
-            ? AAUDIO_ERROR_NULL
-            : AAudioConvert_androidToAAudioResult(mMmapStream->standby());
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    } else {
+        return AAudioConvert_androidToAAudioResult(mMmapStream->standby());
+    }
 }
 
 aaudio_result_t AAudioServiceEndpointMMAP::exitStandby(AudioEndpointParcelable* parcelable) {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     if (mMmapStream == nullptr) {
         return AAUDIO_ERROR_NULL;
     }
     mAudioDataWrapper->reset();
-    const aaudio_result_t result = createMmapBuffer();
+    const aaudio_result_t result = createMmapBuffer_l();
     if (result == AAUDIO_OK) {
         getDownDataDescription(parcelable);
     }
@@ -367,10 +416,12 @@
 // Get free-running DSP or DMA hardware position from the HAL.
 aaudio_result_t AAudioServiceEndpointMMAP::getFreeRunningPosition(int64_t *positionFrames,
                                                                 int64_t *timeNanos) {
-    struct audio_mmap_position position;
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
         return AAUDIO_ERROR_NULL;
     }
+    struct audio_mmap_position position;
     const status_t status = mMmapStream->getMmapPosition(&position);
     ALOGV("%s() status= %d, pos = %d, nanos = %lld\n",
           __func__, status, position.position_frames, (long long) position.time_nanoseconds);
@@ -475,9 +526,14 @@
 aaudio_result_t AAudioServiceEndpointMMAP::getExternalPosition(uint64_t *positionFrames,
                                                                int64_t *timeNanos)
 {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
     if (mHalExternalPositionStatus != AAUDIO_OK) {
         return mHalExternalPositionStatus;
     }
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    }
     uint64_t tempPositionFrames;
     int64_t tempTimeNanos;
     const status_t status = mMmapStream->getExternalPosition(&tempPositionFrames, &tempTimeNanos);
@@ -552,13 +608,20 @@
     return mHalExternalPositionStatus;
 }
 
-aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer()
+// mMmapStreamLock should be held when calling this function.
+aaudio_result_t AAudioServiceEndpointMMAP::createMmapBuffer_l()
 {
     memset(&mMmapBufferinfo, 0, sizeof(struct audio_mmap_buffer_info));
     int32_t minSizeFrames = getBufferCapacity();
     if (minSizeFrames <= 0) { // zero will get rejected
         minSizeFrames = AAUDIO_BUFFER_CAPACITY_MIN;
     }
+
+    if (mMmapStream == nullptr) {
+        ALOGW("%s(): called after mMmapStream set to NULL", __func__);
+        return AAUDIO_ERROR_NULL;
+    }
+
     const status_t status = mMmapStream->createMmapBuffer(minSizeFrames, &mMmapBufferinfo);
     const bool isBufferShareable = mMmapBufferinfo.flags & AUDIO_MMAP_APPLICATION_SHAREABLE;
     if (status != OK) {
@@ -598,6 +661,7 @@
     // Call to HAL to make sure the transport FD was able to be closed by binder.
     // This is a tricky workaround for a problem in Binder.
     // TODO:[b/192048842] When that problem is fixed we may be able to remove or change this code.
+    ALOGD("%s() - call getMmapPosition() as a hack to clear FD stuck in Binder", __func__);
     struct audio_mmap_position position;
     mMmapStream->getMmapPosition(&position);
 
@@ -613,11 +677,14 @@
 }
 
 void AAudioServiceEndpointMMAP::reportData() {
+    const std::lock_guard<std::mutex> lock(mMmapStreamLock);
+
     if (mMmapStream == nullptr) {
         // This must not happen
         ALOGE("%s() invalid state, mmap stream is not initialized", __func__);
         return;
     }
+
     auto fifo = mAudioDataWrapper->getFifoBuffer();
     if (fifo == nullptr) {
         ALOGE("%s() fifo buffer is not initialized, cannot report data", __func__);
diff --git a/services/oboeservice/AAudioServiceEndpointMMAP.h b/services/oboeservice/AAudioServiceEndpointMMAP.h
index eaa578c..962d390 100644
--- a/services/oboeservice/AAudioServiceEndpointMMAP.h
+++ b/services/oboeservice/AAudioServiceEndpointMMAP.h
@@ -50,7 +50,7 @@
 
     aaudio_result_t open(const aaudio::AAudioStreamRequest &request) override;
 
-    void close() override;
+    void close() override EXCLUDES(mMmapStreamLock);
 
     aaudio_result_t startStream(android::sp<AAudioServiceStreamBase> stream,
                                 audio_port_handle_t *clientHandle) override;
@@ -60,15 +60,19 @@
 
     aaudio_result_t startClient(const android::AudioClient& client,
                                 const audio_attributes_t *attr,
-                                audio_port_handle_t *clientHandle)  override;
+                                audio_port_handle_t *clientHandle)  override
+                                EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t stopClient(audio_port_handle_t clientHandle)  override;
+    aaudio_result_t stopClient(audio_port_handle_t clientHandle)  override
+            EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t standby() override;
+    aaudio_result_t standby() override EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) override;
+    aaudio_result_t exitStandby(AudioEndpointParcelable* parcelable) override
+            EXCLUDES(mMmapStreamLock);
 
-    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override;
+    aaudio_result_t getFreeRunningPosition(int64_t *positionFrames, int64_t *timeNanos) override
+             EXCLUDES(mMmapStreamLock);
 
     aaudio_result_t getTimestamp(int64_t *positionFrames, int64_t *timeNanos) override;
 
@@ -88,22 +92,31 @@
         return mHardwareTimeOffsetNanos;
     }
 
-    aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos);
+    aaudio_result_t getExternalPosition(uint64_t *positionFrames, int64_t *timeNanos)
+            EXCLUDES(mMmapStreamLock);
 
-    int64_t nextDataReportTime();
+    int64_t nextDataReportTime() EXCLUDES(mMmapStreamLock);
 
-    void reportData();
+    void reportData() EXCLUDES(mMmapStreamLock);
 
 private:
 
-    aaudio_result_t openWithConfig(audio_config_base_t* config);
+    /**
+     * Clear the MMAP stream if it is still set.
+     * @return true if mMmapStream was cleared
+     */
+    bool close_l() REQUIRES(mMmapStreamLock);
 
-    aaudio_result_t createMmapBuffer();
+    aaudio_result_t openWithConfig(audio_config_base_t* config) EXCLUDES(mMmapStreamLock);
+
+    aaudio_result_t createMmapBuffer_l() REQUIRES(mMmapStreamLock);
 
     MonotonicCounter                          mFramesTransferred;
 
     // Interface to the AudioFlinger MMAP support.
-    android::sp<android::MmapStreamInterface> mMmapStream;
+    mutable std::mutex                        mMmapStreamLock;
+    android::sp<android::MmapStreamInterface> mMmapStream GUARDED_BY(mMmapStreamLock);
+
     struct audio_mmap_buffer_info             mMmapBufferinfo;
 
     // There is only one port associated with an MMAP endpoint.
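
The header change above introduces mMmapStreamLock and Clang thread-safety
annotations: GUARDED_BY on the protected member, REQUIRES on the *_l helpers
that expect the lock to be held, and EXCLUDES on the public entry points that
take the lock themselves. A minimal sketch of that pattern (the class name and
the annotation header path are assumptions, not part of this patch):

    #include <mutex>
    // GUARDED_BY / REQUIRES / EXCLUDES are assumed to come from libbase's
    // thread_annotations.h; the exact header path may differ.
    #include <android-base/thread_annotations.h>

    class Endpoint {
     public:
      void close() EXCLUDES(mLock) {     // public API: acquires the lock itself
        const std::lock_guard<std::mutex> lock(mLock);
        close_l();
      }

     private:
      void close_l() REQUIRES(mLock) {   // "_l" suffix: lock already held
        mStreamOpen = false;
      }

      mutable std::mutex mLock;
      bool mStreamOpen GUARDED_BY(mLock) = true;
    };
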
diff --git a/services/oboeservice/AAudioServiceStreamBase.cpp b/services/oboeservice/AAudioServiceStreamBase.cpp
index 20a5ea8..78cf706 100644
--- a/services/oboeservice/AAudioServiceStreamBase.cpp
+++ b/services/oboeservice/AAudioServiceStreamBase.cpp
@@ -227,6 +227,10 @@
         ALOGE("%s() has no endpoint", __func__);
         return AAUDIO_ERROR_INVALID_STATE;
     }
+    if (!endpoint->isConnected()) {
+        ALOGE("%s() endpoint was already disconnected", __func__);
+        return AAUDIO_ERROR_DISCONNECTED;
+    }
     return endpoint->startStream(this, &mClientHandle);
 }
 
@@ -334,6 +338,7 @@
 aaudio_result_t AAudioServiceStreamBase::stop_l() {
     aaudio_result_t result = AAUDIO_OK;
     if (!isRunning()) {
+        ALOGW("%s() stream not running, returning early", __func__);
         return result;
     }
     const int64_t beginNs = AudioClock::getNanoseconds();
diff --git a/services/oboeservice/AAudioServiceStreamMMAP.cpp b/services/oboeservice/AAudioServiceStreamMMAP.cpp
index fc53949..5203e50 100644
--- a/services/oboeservice/AAudioServiceStreamMMAP.cpp
+++ b/services/oboeservice/AAudioServiceStreamMMAP.cpp
@@ -150,9 +150,9 @@
 
 aaudio_result_t AAudioServiceStreamMMAP::startClient(const android::AudioClient& client,
                                                      const audio_attributes_t *attr,
-                                                     audio_port_handle_t *clientHandle) {
+                                                     audio_port_handle_t *portHandlePtr) {
     if (com::android::media::aaudio::start_stop_client_from_command_thread()) {
-        return sendStartClientCommand(client, attr, clientHandle);
+        return sendStartClientCommand(client, attr, portHandlePtr);
     } else {
         sp<AAudioServiceEndpoint> endpoint = mServiceEndpointWeak.promote();
         if (endpoint == nullptr) {
@@ -160,7 +160,9 @@
             return AAUDIO_ERROR_INVALID_STATE;
         }
         // Start the client on behalf of the application. Generate a new porthandle.
-        aaudio_result_t result = endpoint->startClient(client, attr, clientHandle);
+        aaudio_result_t result = endpoint->startClient(client, attr, portHandlePtr);
+        ALOGD("%s() flag off, got port %d", __func__,
+              ((portHandlePtr == nullptr) ? -1 : *portHandlePtr));
         return result;
     }
 }