Merge "Fix audio glitch when starting/stopping HFP call" into main am: ad70952b82 am: 727e8d5ddf

Original change: https://android-review.googlesource.com/c/platform/frameworks/av/+/3234684

Change-Id: Ie9318c74229c658473ce718e42d4b060a6a4afb7
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
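
The camera-side changes below migrate ICameraService and its callers from separate opPackageName/clientUid/clientPid/deviceId arguments to a single AttributionSourceState parcelable. A minimal sketch of the pattern the updated call sites use, with the same "let the service resolve the caller" defaults that appear in the hunks below (the bare deviceId variable stands for whatever device id the caller previously passed as an int):

    #include <android/content/AttributionSourceState.h>
    #include <android/hardware/ICameraService.h>

    using android::content::AttributionSourceState;

    AttributionSourceState clientAttribution;
    clientAttribution.uid = android::hardware::ICameraService::USE_CALLING_UID;
    clientAttribution.pid = android::hardware::ICameraService::USE_CALLING_PID;
    clientAttribution.deviceId = deviceId;   // previously passed as a separate int parameter
    clientAttribution.packageName = "";      // empty: the service derives the package from the UID
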
diff --git a/aidl/com/android/media/permission/PermissionEnum.aidl b/aidl/com/android/media/permission/PermissionEnum.aidl
index 834a275..b08db44 100644
--- a/aidl/com/android/media/permission/PermissionEnum.aidl
+++ b/aidl/com/android/media/permission/PermissionEnum.aidl
@@ -21,11 +21,21 @@
  * {@hide}
  */
 enum PermissionEnum {
-    MODIFY_AUDIO_ROUTING = 0,
-    MODIFY_PHONE_STATE = 1,
-    CALL_AUDIO_INTERCEPTION = 2,
     // This is a runtime + WIU permission, which means data delivery should be protected by AppOps
     // We query the controller only for early fails/hard errors
-    RECORD_AUDIO = 3,
-    ENUM_SIZE = 4, // Not for actual usage
+    RECORD_AUDIO = 0,
+    MODIFY_AUDIO_ROUTING = 1,
+    MODIFY_AUDIO_SETTINGS = 2,
+    MODIFY_PHONE_STATE = 3,
+    MODIFY_DEFAULT_AUDIO_EFFECTS = 4,
+    WRITE_SECURE_SETTINGS = 5,
+    CALL_AUDIO_INTERCEPTION = 6,
+    ACCESS_ULTRASOUND = 7,
+    CAPTURE_AUDIO_OUTPUT = 8,
+    CAPTURE_MEDIA_OUTPUT = 9,
+    CAPTURE_AUDIO_HOTWORD = 10,
+    CAPTURE_TUNER_AUDIO_INPUT = 11,
+    CAPTURE_VOICE_COMMUNICATION_OUTPUT = 12,
+    BLUETOOTH_CONNECT = 13,
+    ENUM_SIZE = 14, // Not for actual usage, used by Java
 }
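
The trailing ENUM_SIZE entry is a sentinel rather than a permission, kept so callers can size dense per-permission tables (the comment above notes it is consumed from Java). A hedged sketch of that kind of usage from C++; the cache and function names are illustrative, not part of this change:

    #include <array>
    #include <com/android/media/permission/PermissionEnum.h>  // generated from the AIDL above

    using com::android::media::permission::PermissionEnum;

    // Hypothetical per-permission cache sized by the sentinel value.
    std::array<bool, static_cast<size_t>(PermissionEnum::ENUM_SIZE)> grantedCache{};

    bool isGranted(PermissionEnum perm) {
        return grantedCache[static_cast<size_t>(perm)];
    }
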
diff --git a/camera/Android.bp b/camera/Android.bp
index 9e1efae..d91fcb2 100644
--- a/camera/Android.bp
+++ b/camera/Android.bp
@@ -75,6 +75,7 @@
         local_include_dirs: ["aidl"],
         include_dirs: [
             "frameworks/native/aidl/gui",
+            "frameworks/native/libs/permission/aidl",
         ],
     },
 
@@ -106,6 +107,7 @@
 
     shared_libs: [
         "camera_platform_flags_c_lib",
+        "framework-permission-aidl-cpp",
         "lib-platform-compat-native-api",
         "libbase",
         "libbinder",
@@ -114,6 +116,7 @@
         "libgui",
         "liblog",
         "libnativewindow",
+        "libpermission",
         "libutils",
     ],
 
@@ -126,6 +129,7 @@
         "include/camera",
     ],
     export_shared_lib_headers: [
+        "framework-permission-aidl-cpp",
         "libcamera_metadata",
         "libgui",
         "libnativewindow",
diff --git a/camera/Camera.cpp b/camera/Camera.cpp
index 8018390..d90f7c9 100644
--- a/camera/Camera.cpp
+++ b/camera/Camera.cpp
@@ -69,13 +69,12 @@
     // deadlock if we call any method of ICamera here.
 }
 
-sp<Camera> Camera::connect(int cameraId, const std::string& clientPackageName,
-        int clientUid, int clientPid, int targetSdkVersion, int rotationOverride,
-        bool forceSlowJpegMode, int32_t deviceId, int32_t devicePolicy)
+sp<Camera> Camera::connect(int cameraId, int targetSdkVersion, int rotationOverride,
+        bool forceSlowJpegMode, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy)
 {
-    return CameraBaseT::connect(cameraId, clientPackageName, clientUid,
-            clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
-            devicePolicy);
+    return CameraBaseT::connect(cameraId, targetSdkVersion, rotationOverride,
+            forceSlowJpegMode, clientAttribution, devicePolicy);
 }
 
 status_t Camera::reconnect()
diff --git a/camera/CameraBase.cpp b/camera/CameraBase.cpp
index d7415a3..774db25 100644
--- a/camera/CameraBase.cpp
+++ b/camera/CameraBase.cpp
@@ -161,10 +161,10 @@
 
 template <typename TCam, typename TCamTraits>
 sp<TCam> CameraBase<TCam, TCamTraits>::connect(int cameraId,
-                                               const std::string& clientPackageName,
-                                               int clientUid, int clientPid, int targetSdkVersion,
-                                               int rotationOverride, bool forceSlowJpegMode,
-                                               int32_t deviceId, int32_t devicePolicy)
+                                               int targetSdkVersion, int rotationOverride,
+                                               bool forceSlowJpegMode,
+                                               const AttributionSourceState& clientAttribution,
+                                               int32_t devicePolicy)
 {
     ALOGV("%s: connect", __FUNCTION__);
     sp<TCam> c = new TCam(cameraId);
@@ -176,9 +176,9 @@
         TCamConnectService fnConnectService = TCamTraits::fnConnectService;
         ALOGI("Connect camera (legacy API) - rotationOverride %d, forceSlowJpegMode %d",
                 rotationOverride, forceSlowJpegMode);
-        ret = (cs.get()->*fnConnectService)(cl, cameraId, clientPackageName, clientUid,
-                clientPid, targetSdkVersion, rotationOverride, forceSlowJpegMode, deviceId,
-                devicePolicy, /*out*/ &c->mCamera);
+        ret = (cs.get()->*fnConnectService)(cl, cameraId, targetSdkVersion,
+                rotationOverride, forceSlowJpegMode, clientAttribution, devicePolicy,
+                /*out*/ &c->mCamera);
     }
     if (ret.isOk() && c->mCamera != nullptr) {
         IInterface::asBinder(c->mCamera)->linkToDeath(c);
@@ -257,7 +257,8 @@
 }
 
 template <typename TCam, typename TCamTraits>
-int CameraBase<TCam, TCamTraits>::getNumberOfCameras(int32_t deviceId, int32_t devicePolicy) {
+int CameraBase<TCam, TCamTraits>::getNumberOfCameras(
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
 
     if (!cs.get()) {
@@ -266,7 +267,7 @@
     }
     int32_t count;
     binder::Status res = cs->getNumberOfCameras(
-            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, deviceId,
+            ::android::hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE, clientAttribution,
             devicePolicy, &count);
     if (!res.isOk()) {
         ALOGE("Error reading number of cameras: %s",
@@ -279,12 +280,12 @@
 // this can be in BaseCamera but it should be an instance method
 template <typename TCam, typename TCamTraits>
 status_t CameraBase<TCam, TCamTraits>::getCameraInfo(int cameraId,
-        int rotationOverride, int32_t deviceId, int32_t devicePolicy,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         struct hardware::CameraInfo* cameraInfo) {
     const sp<::android::hardware::ICameraService> cs = getCameraService();
     if (cs == 0) return UNKNOWN_ERROR;
-    binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, deviceId, devicePolicy,
-            cameraInfo);
+    binder::Status res = cs->getCameraInfo(cameraId, rotationOverride, clientAttribution,
+            devicePolicy, cameraInfo);
     return res.isOk() ? OK : res.serviceSpecificErrorCode();
 }
 
diff --git a/camera/aidl/android/hardware/ICameraService.aidl b/camera/aidl/android/hardware/ICameraService.aidl
index d9a0934..ce6c2d3 100644
--- a/camera/aidl/android/hardware/ICameraService.aidl
+++ b/camera/aidl/android/hardware/ICameraService.aidl
@@ -16,6 +16,7 @@
 
 package android.hardware;
 
+import android.content.AttributionSourceState;
 import android.hardware.ICamera;
 import android.hardware.ICameraClient;
 import android.hardware.camera2.ICameraDeviceUser;
@@ -66,13 +67,13 @@
      *
      * @param type The type of the camera, can be either CAMERA_TYPE_BACKWARD_COMPATIBLE
      *        or CAMERA_TYPE_ALL.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      */
-    int getNumberOfCameras(int type, int deviceId, int devicePolicy);
+    int getNumberOfCameras(int type, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * If changed, reflect in
@@ -97,19 +98,20 @@
      *        will override the sensor orientation and rotate and crop, while {@link
      *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
      *        without changing the sensor orientation.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      * @return CameraInfo for the camera.
      */
-    CameraInfo getCameraInfo(int cameraId, int rotationOverride, int deviceId,
-            int devicePolicy);
+    CameraInfo getCameraInfo(int cameraId, int rotationOverride,
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
-     * Default UID/PID values for non-privileged callers of
-     * connect() and connectDevice()
+     * Default UID/PID values for non-privileged callers of connect() and connectDevice(). Can be
+     * used to set the pid/uid fields of AttributionSourceState to indicate the calling uid/pid
+     * should be used.
      */
     const int USE_CALLING_UID = -1;
     const int USE_CALLING_PID = -1;
@@ -118,9 +120,6 @@
      * Open a camera device through the old camera API.
      *
      * @param cameraId The ID of the camera to open.
-     * @param opPackageName The package name to report for the app-ops.
-     * @param clientUid UID for the calling client.
-     * @param clientPid PID for the calling client.
      * @param targetSdkVersion the target sdk level of the application calling this function.
      * @param rotationOverride Whether to override the sensor orientation information to
      *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
@@ -128,7 +127,7 @@
      *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
      *        without changing the sensor orientation.
      * @param forceSlowJpegMode Whether to force slow jpeg mode.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -136,12 +135,10 @@
      */
     ICamera connect(ICameraClient client,
             int cameraId,
-            @utf8InCpp String opPackageName,
-            int clientUid, int clientPid,
             int targetSdkVersion,
             int rotationOverride,
             boolean forceSlowJpegMode,
-            int deviceId,
+            in AttributionSourceState clientAttribution,
             int devicePolicy);
 
     /**
@@ -149,15 +146,13 @@
      * Only supported for device HAL versions >= 3.2.
      *
      * @param cameraId The ID of the camera to open.
-     * @param opPackageName The package name to report for the app-ops.
-     * @param clientUid UID for the calling client.
      * @param targetSdkVersion the target sdk level of the application calling this function.
      * @param rotationOverride Whether to override the sensor orientation information to
      *        correspond to portrait: {@link ICameraService#ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT}
      *        will override the sensor orientation and rotate and crop, while {@link
      *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
      *        without changing the sensor orientation.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -165,12 +160,10 @@
      */
     ICameraDeviceUser connectDevice(ICameraDeviceCallbacks callbacks,
             @utf8InCpp String cameraId,
-            @utf8InCpp String opPackageName,
-            @nullable @utf8InCpp String featureId,
-            int clientUid, int oomScoreOffset,
+            int oomScoreOffset,
             int targetSdkVersion,
             int rotationOverride,
-            int deviceId,
+            in AttributionSourceState clientAttribution,
             int devicePolicy);
 
     /**
@@ -194,7 +187,7 @@
      *
      * @param sessions the set of camera id and session configuration pairs to be queried.
      * @param targetSdkVersion the target sdk level of the application calling this function.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -206,7 +199,7 @@
      */
     boolean isConcurrentSessionConfigurationSupported(
             in CameraIdAndSessionConfiguration[] sessions,
-            int targetSdkVersion, int deviceId, int devicePolicy);
+            int targetSdkVersion, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Inject Session Params into an existing camera session.
@@ -236,7 +229,7 @@
      *        will override the sensor orientation and rotate and crop, while {@link
      *        ICameraService#ROTATION_OVERRIDE_ROTATION_ONLY} will rotate and crop the camera feed
      *        without changing the sensor orientation.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -244,7 +237,7 @@
      * @return Characteristics for the given camera.
      */
     CameraMetadataNative getCameraCharacteristics(@utf8InCpp String cameraId, int targetSdkVersion,
-            int rotationOverride, int deviceId, int devicePolicy);
+            int rotationOverride, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Read in the vendor tag descriptors from the camera module HAL.
@@ -284,14 +277,14 @@
      * Set the torch mode for a camera device.
      *
      * @param cameraId The ID of the camera to set torch mode for.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      */
     void setTorchMode(@utf8InCpp String cameraId, boolean enabled, IBinder clientBinder,
-            int deviceId, int devicePolicy);
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Change the brightness level of the flash unit associated with cameraId to strengthLevel.
@@ -299,27 +292,28 @@
      *
      * @param cameraId The ID of the camera.
      * @param strengthLevel The torch strength level to set for the camera.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      */
     void turnOnTorchWithStrengthLevel(@utf8InCpp String cameraId, int strengthLevel,
-            IBinder clientBinder, int deviceId, int devicePolicy);
+            IBinder clientBinder, in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Get the brightness level of the flash unit associated with cameraId.
      *
      * @param cameraId The ID of the camera.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
      *                     policy.
      * @return Torch strength level for the camera.
      */
-    int getTorchStrengthLevel(@utf8InCpp String cameraId, int deviceId, int devicePolicy);
+    int getTorchStrengthLevel(@utf8InCpp String cameraId,
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Notify the camera service of a system event.  Should only be called from system_server.
@@ -385,7 +379,7 @@
      *
      * @param cameraId The camera id to create the CaptureRequest for.
      * @param templateId The template id create the CaptureRequest for.
-     * @param deviceId the device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -393,7 +387,7 @@
      * @return Metadata representing the CaptureRequest.
      */
     CameraMetadataNative createDefaultRequest(@utf8InCpp String cameraId, int templateId,
-            int deviceId, int devicePolicy);
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Check whether a particular session configuration with optional session parameters
@@ -402,7 +396,7 @@
      * @param cameraId The camera id to query session configuration for
      * @param targetSdkVersion the target sdk level of the application calling this function.
      * @param sessionConfiguration Specific session configuration to be verified.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -412,7 +406,7 @@
      */
     boolean isSessionConfigurationWithParametersSupported(@utf8InCpp String cameraId,
             int targetSdkVersion, in SessionConfiguration sessionConfiguration,
-            int deviceId, int devicePolicy);
+            in AttributionSourceState clientAttribution, int devicePolicy);
 
     /**
      * Get the camera characteristics for a particular session configuration for
@@ -427,7 +421,7 @@
      *        without changing the sensor orientation.
      * @param sessionConfiguration Session configuration for which the characteristics
      *                             must be fetched.
-     * @param deviceId The device id of the context associated with the caller.
+     * @param clientAttribution The AttributionSource of the client.
      * @param devicePolicy The camera policy of the device of the associated context (default
      *                     policy for default device context). Only virtual cameras would be exposed
      *                     only for custom policy and only real cameras would be exposed for default
@@ -438,6 +432,6 @@
             int targetSdkVersion,
             int rotationOverride,
             in SessionConfiguration sessionConfiguration,
-            int deviceId,
+            in AttributionSourceState clientAttribution,
             int devicePolicy);
 }
diff --git a/camera/camera_platform.aconfig b/camera/camera_platform.aconfig
index fe10e12..e916985 100644
--- a/camera/camera_platform.aconfig
+++ b/camera/camera_platform.aconfig
@@ -247,3 +247,33 @@
         purpose: PURPOSE_BUGFIX
     }
 }
+
+flag {
+    namespace: "camera_platform"
+    name: "api1_release_binderlock_before_cameraservice_disconnect"
+    description: "Drop mSerializationLock in Camera1 client when calling into CameraService"
+    bug: "351778072"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "bump_preview_frame_space_priority"
+    description: "Increase the PreviewFrameSpacer thread priority"
+    bug: "355665306"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
+
+flag {
+    namespace: "camera_platform"
+    name: "dumpsys_request_stream_ids"
+    description: "Add stream id information to last request dumpsys"
+    bug: "357913929"
+    metadata {
+        purpose: PURPOSE_BUGFIX
+    }
+}
\ No newline at end of file
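
Each flag added above becomes a generated predicate in camera_platform_flags_c_lib (already linked from camera/Android.bp earlier in this diff). A sketch of gating native code on one of them, assuming the aconfig package for this file is com.android.internal.camera.flags (the package line is not part of this hunk):

    #include <com_android_internal_camera_flags.h>

    namespace flags = com::android::internal::camera::flags;

    void dumpLastRequest() {
        if (flags::dumpsys_request_stream_ids()) {
            // include per-request stream id information in the dumpsys output
        }
    }
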
diff --git a/camera/include/camera/Camera.h b/camera/include/camera/Camera.h
index 3ecd10d..646b139 100644
--- a/camera/include/camera/Camera.h
+++ b/camera/include/camera/Camera.h
@@ -59,7 +59,7 @@
     typedef ::android::hardware::ICameraClient TCamCallbacks;
     typedef ::android::binder::Status (::android::hardware::ICameraService::*TCamConnectService)
         (const sp<::android::hardware::ICameraClient>&,
-        int, const std::string&, int, int, int, int, bool, int32_t, int32_t,
+        int, int, int, bool, const AttributionSourceState&, int32_t,
         /*out*/
         sp<::android::hardware::ICamera>*);
     static TCamConnectService     fnConnectService;
@@ -81,10 +81,9 @@
             // construct a camera client from an existing remote
     static  sp<Camera>  create(const sp<::android::hardware::ICamera>& camera);
     static  sp<Camera>  connect(int cameraId,
-                                const std::string& clientPackageName,
-                                int clientUid, int clientPid, int targetSdkVersion,
-                                int rotationOverride, bool forceSlowJpegMode,
-                                int32_t deviceId = kDefaultDeviceId, int32_t devicePolicy = 0);
+                                int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+                                const AttributionSourceState& clientAttribution,
+                                int32_t devicePolicy = 0);
 
             virtual     ~Camera();
 
diff --git a/camera/include/camera/CameraBase.h b/camera/include/camera/CameraBase.h
index 3370b3d..d98abe4 100644
--- a/camera/include/camera/CameraBase.h
+++ b/camera/include/camera/CameraBase.h
@@ -17,6 +17,7 @@
 #ifndef ANDROID_HARDWARE_CAMERA_BASE_H
 #define ANDROID_HARDWARE_CAMERA_BASE_H
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/ICameraServiceListener.h>
 
 #include <utils/Mutex.h>
@@ -107,6 +108,7 @@
 
 } // namespace hardware
 
+using content::AttributionSourceState;
 using hardware::CameraInfo;
 
 template <typename TCam>
@@ -123,19 +125,19 @@
     typedef typename TCamTraits::TCamConnectService TCamConnectService;
 
     static sp<TCam>      connect(int cameraId,
-                                 const std::string& clientPackageName,
-                                 int clientUid, int clientPid, int targetSdkVersion,
-                                 int rotationOverride, bool forceSlowJpegMode,
-                                 int32_t deviceId, int32_t devicePolicy);
+                                 int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+                                 const AttributionSourceState &clientAttribution,
+                                 int32_t devicePolicy);
     virtual void         disconnect();
 
     void                 setListener(const sp<TCamListener>& listener);
 
-    static int           getNumberOfCameras(int32_t deviceId, int32_t devicePolicy);
+    static int           getNumberOfCameras(const AttributionSourceState& clientAttribution,
+                                            int32_t devicePolicy);
 
     static status_t      getCameraInfo(int cameraId,
                                        int rotationOverride,
-                                       int32_t deviceId,
+                                       const AttributionSourceState& clientAttribution,
                                        int32_t devicePolicy,
                                        /*out*/
                                        struct hardware::CameraInfo* cameraInfo);
diff --git a/camera/ndk/Android.bp b/camera/ndk/Android.bp
index a603659..379c0b5 100644
--- a/camera/ndk/Android.bp
+++ b/camera/ndk/Android.bp
@@ -79,6 +79,7 @@
     shared_libs: [
         "android.companion.virtual.virtualdevice_aidl-cpp",
         "android.companion.virtualdevice.flags-aconfig-cc",
+        "framework-permission-aidl-cpp",
         "libandroid_runtime",
         "libbinder",
         "libcamera_client",
diff --git a/camera/ndk/impl/ACameraManager.cpp b/camera/ndk/impl/ACameraManager.cpp
index 0c80e7e..6d29ef5 100644
--- a/camera/ndk/impl/ACameraManager.cpp
+++ b/camera/ndk/impl/ACameraManager.cpp
@@ -807,9 +807,15 @@
 
     CameraMetadata rawMetadata;
     int targetSdkVersion = android_get_application_target_sdk_version();
+
+    AttributionSourceState clientAttribution;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+    clientAttribution.deviceId = mDeviceContext.deviceId;
+
     binder::Status serviceRet = cs->getCameraCharacteristics(cameraIdStr,
             targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-            mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
+            clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
             &rawMetadata);
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -857,13 +863,20 @@
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = device->getServiceCallback();
     sp<hardware::camera2::ICameraDeviceUser> deviceRemote;
     int targetSdkVersion = android_get_application_target_sdk_version();
+
+    AttributionSourceState clientAttribution;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+    clientAttribution.deviceId = mDeviceContext.deviceId;
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
+
     // No way to get package name from native.
     // Send a zero length package name and let camera service figure it out from UID
     binder::Status serviceRet = cs->connectDevice(
-            callbacks, cameraId, "", {},
-            hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/0,
+            callbacks, cameraId, /*oomScoreOffset*/0,
             targetSdkVersion, /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-            mDeviceContext.deviceId, static_cast<int32_t>(mDeviceContext.policy),
+            clientAttribution, static_cast<int32_t>(mDeviceContext.policy),
             /*out*/&deviceRemote);
 
     if (!serviceRet.isOk()) {
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index cf6b970..1400121 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -576,9 +576,7 @@
  *
  * @param session the capture session of interest
  *
- * @return <ul><li>
- *             {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
- *             if it is not NULL.</li>
+ * @return <ul><li>{@link ACAMERA_OK} if the method succeeds.</li>
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
  *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
  *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
@@ -617,9 +615,7 @@
  *
  * @param session the capture session of interest
  *
- * @return <ul><li>
- *             {@link ACAMERA_OK} if the method succeeds. captureSequenceId will be filled
- *             if it is not NULL.</li>
+ * @return <ul><li>{@link ACAMERA_OK} if the method succeeds.</li>
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if session is NULL.</li>
  *         <li>{@link ACAMERA_ERROR_SESSION_CLOSED} if the capture session has been closed</li>
  *         <li>{@link ACAMERA_ERROR_CAMERA_DISCONNECTED} if the camera device is closed</li>
diff --git a/camera/tests/Android.bp b/camera/tests/Android.bp
index 9aaac6a..484335a 100644
--- a/camera/tests/Android.bp
+++ b/camera/tests/Android.bp
@@ -29,6 +29,7 @@
         "CameraCharacteristicsPermission.cpp",
     ],
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "liblog",
         "libutils",
         "libcutils",
diff --git a/camera/tests/CameraBinderTests.cpp b/camera/tests/CameraBinderTests.cpp
index e5f99be..5135b5d 100644
--- a/camera/tests/CameraBinderTests.cpp
+++ b/camera/tests/CameraBinderTests.cpp
@@ -33,6 +33,7 @@
 #include <hardware/gralloc.h>
 
 #include <camera/CameraMetadata.h>
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/ICameraService.h>
 #include <android/hardware/ICameraServiceListener.h>
 #include <android/hardware/BnCameraServiceListener.h>
@@ -46,6 +47,7 @@
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
@@ -347,7 +349,11 @@
     binder::Status res;
 
     int32_t numCameras = 0;
-    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId,
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+    clientAttribution.packageName = "meeeeeeeee!";
+    res = service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution,
             /*devicePolicy*/0, &numCameras);
     EXPECT_TRUE(res.isOk()) << res;
     EXPECT_LE(0, numCameras);
@@ -360,7 +366,7 @@
 
     EXPECT_EQ(numCameras, static_cast<const int>(statuses.size()));
     for (const auto &it : statuses) {
-        listener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
+        listener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
     }
 
     for (int32_t i = 0; i < numCameras; i++) {
@@ -379,7 +385,7 @@
         CameraMetadata metadata;
         res = service->getCameraCharacteristics(cameraId,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+                clientAttribution, /*devicePolicy*/0, &metadata);
         EXPECT_TRUE(res.isOk()) << res;
         EXPECT_FALSE(metadata.isEmpty());
 
@@ -393,10 +399,10 @@
         // Check connect binder calls
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        res = service->connectDevice(callbacks, cameraId, "meeeeeeeee!",
-                {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+        res = service->connectDevice(callbacks, cameraId,
+                /*oomScoreOffset*/ 0,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, /*out*/&device);
+                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, /*out*/&device);
         EXPECT_TRUE(res.isOk()) << res;
         ASSERT_NE(nullptr, device.get());
         device->disconnect();
@@ -406,12 +412,12 @@
         if (torchStatus == hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF) {
             // Check torch calls
             res = service->setTorchMode(cameraId,
-                    /*enabled*/true, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
+                    /*enabled*/true, callbacks, clientAttribution, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_ON, i));
             res = service->setTorchMode(cameraId,
-                    /*enabled*/false, callbacks, kDefaultDeviceId, /*devicePolicy*/0);
+                    /*enabled*/false, callbacks, clientAttribution, /*devicePolicy*/0);
             EXPECT_TRUE(res.isOk()) << res;
             EXPECT_TRUE(listener->waitForTorchState(
                     hardware::ICameraServiceListener::TORCH_STATUS_AVAILABLE_OFF, i));
@@ -437,10 +443,14 @@
         sp<hardware::camera2::ICameraDeviceUser> device;
         {
             SCOPED_TRACE("openNewDevice");
-            binder::Status res = service->connectDevice(callbacks, deviceId, "meeeeeeeee!",
-                    {}, hardware::ICameraService::USE_CALLING_UID, /*oomScoreOffset*/ 0,
+            AttributionSourceState clientAttribution;
+            clientAttribution.deviceId = kDefaultDeviceId;
+            clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+            clientAttribution.packageName = "meeeeeeeee!";
+            binder::Status res = service->connectDevice(callbacks, deviceId,
+                    /*oomScoreOffset*/ 0,
                     /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                    /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0,
+                    /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0,
                     /*out*/&device);
             EXPECT_TRUE(res.isOk()) << res;
         }
@@ -473,11 +483,13 @@
         serviceListener = new TestCameraServiceListener();
         std::vector<hardware::CameraStatus> statuses;
         service->addListener(serviceListener, &statuses);
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         for (const auto &it : statuses) {
-            serviceListener->onStatusChanged(it.status, it.cameraId, kDefaultDeviceId);
+            serviceListener->onStatusChanged(it.status, it.cameraId, clientAttribution.deviceId);
         }
         service->getNumberOfCameras(hardware::ICameraService::CAMERA_TYPE_BACKWARD_COMPATIBLE,
-                kDefaultDeviceId, /*devicePolicy*/0, &numCameras);
+                clientAttribution, /*devicePolicy*/0, &numCameras);
     }
 
     virtual void TearDown() {
@@ -507,6 +519,23 @@
 
         // Setup a buffer queue; I'm just using the vendor opaque format here as that is
         // guaranteed to be present
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/ 2, /*controlledByApp*/ true);
+        EXPECT_TRUE(opaqueConsumer.get() != nullptr);
+        opaqueConsumer->setName(String8("nom nom nom"));
+
+        // Set to VGA dimens for default, as that is guaranteed to be present
+        EXPECT_EQ(OK, opaqueConsumer->setDefaultBufferSize(640, 480));
+        EXPECT_EQ(OK,
+                  opaqueConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED));
+
+        sp<Surface> surface = opaqueConsumer->getSurface();
+
+        sp<IGraphicBufferProducer> producer = surface->getIGraphicBufferProducer();
+        std::string noPhysicalId;
+        OutputConfiguration output(producer, /*rotation*/ 0, noPhysicalId);
+#else
         sp<IGraphicBufferProducer> gbProducer;
         sp<IGraphicBufferConsumer> gbConsumer;
         BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
@@ -523,6 +552,7 @@
 
         std::string noPhysicalId;
         OutputConfiguration output(gbProducer, /*rotation*/0, noPhysicalId);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         // Can we configure?
         res = device->beginConfigure();
diff --git a/camera/tests/CameraCharacteristicsPermission.cpp b/camera/tests/CameraCharacteristicsPermission.cpp
index 10f7f22..9204eb1 100644
--- a/camera/tests/CameraCharacteristicsPermission.cpp
+++ b/camera/tests/CameraCharacteristicsPermission.cpp
@@ -19,6 +19,7 @@
 
 #include <gtest/gtest.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/ProcessState.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -47,8 +48,10 @@
     sp<IServiceManager> sm = defaultServiceManager();
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+            hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
             &numCameras);
     EXPECT_TRUE(rc.isOk());
 }
@@ -73,9 +76,11 @@
 
         CameraMetadata metadata;
         std::vector<int32_t> tagsNeedingPermission;
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+                /*overrideToPortrait*/false, clientAttribution, /*devicePolicy*/0, &metadata);
         ASSERT_TRUE(rc.isOk());
         EXPECT_FALSE(metadata.isEmpty());
         EXPECT_EQ(metadata.removePermissionEntries(CAMERA_METADATA_INVALID_VENDOR_ID,
diff --git a/camera/tests/CameraZSLTests.cpp b/camera/tests/CameraZSLTests.cpp
index 56fcfa4..2740d09 100644
--- a/camera/tests/CameraZSLTests.cpp
+++ b/camera/tests/CameraZSLTests.cpp
@@ -19,6 +19,7 @@
 
 #include <gtest/gtest.h>
 
+#include <android/content/AttributionSourceState.h>
 #include <binder/ProcessState.h>
 #include <utils/Errors.h>
 #include <utils/Log.h>
@@ -84,8 +85,10 @@
     sp<IServiceManager> sm = defaultServiceManager();
     sp<IBinder> binder = sm->getService(String16("media.camera"));
     mCameraService = interface_cast<ICameraService>(binder);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     rc = mCameraService->getNumberOfCameras(
-            hardware::ICameraService::CAMERA_TYPE_ALL, kDefaultDeviceId, /*devicePolicy*/0,
+            hardware::ICameraService::CAMERA_TYPE_ALL, clientAttribution, /*devicePolicy*/0,
             &numCameras);
     EXPECT_TRUE(rc.isOk());
 
@@ -183,9 +186,11 @@
         }
 
         CameraMetadata metadata;
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
         rc = mCameraService->getCameraCharacteristics(cameraIdStr,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, /*overrideToPortrait*/false,
-                kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+                clientAttribution, /*devicePolicy*/0, &metadata);
         if (!rc.isOk()) {
             // The test is relevant only for cameras with Hal 3.x
             // support.
@@ -209,11 +214,12 @@
             continue;
         }
 
+        clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+        clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+        clientAttribution.packageName = "ZSLTest";
         rc = mCameraService->connect(this, cameraId,
-                "ZSLTest", hardware::ICameraService::USE_CALLING_UID,
-                hardware::ICameraService::USE_CALLING_PID,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__,
-                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, kDefaultDeviceId,
+                /*overrideToPortrait*/false, /*forceSlowJpegMode*/false, clientAttribution,
                 /*devicePolicy*/0, &cameraDevice);
         EXPECT_TRUE(rc.isOk());
 
diff --git a/camera/tests/fuzzer/Android.bp b/camera/tests/fuzzer/Android.bp
index bd97c39..3b6413c 100644
--- a/camera/tests/fuzzer/Android.bp
+++ b/camera/tests/fuzzer/Android.bp
@@ -31,6 +31,7 @@
     ],
     shared_libs: [
         "camera_platform_flags_c_lib",
+        "framework-permission-aidl-cpp",
         "libbase",
         "libcutils",
         "libutils",
diff --git a/camera/tests/fuzzer/camera_fuzzer.cpp b/camera/tests/fuzzer/camera_fuzzer.cpp
index b0f59f1..f46d246 100644
--- a/camera/tests/fuzzer/camera_fuzzer.cpp
+++ b/camera/tests/fuzzer/camera_fuzzer.cpp
@@ -17,6 +17,7 @@
 #include <Camera.h>
 #include <CameraParameters.h>
 #include <CameraUtils.h>
+#include <android/content/AttributionSourceState.h>
 #include <binder/MemoryDealer.h>
 #include <fuzzer/FuzzedDataProvider.h>
 #include <gui/Surface.h>
@@ -123,21 +124,24 @@
     sp<ICameraService> cameraService = nullptr;
     cameraService = interface_cast<ICameraService>(binder);
     sp<ICamera> cameraDevice = nullptr;
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
     if (mFDP->ConsumeBool()) {
-        cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */, "CAMERAFUZZ",
-                               hardware::ICameraService::USE_CALLING_UID,
-                               hardware::ICameraService::USE_CALLING_PID,
+        clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
+        clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;
+        clientAttribution.packageName = "CAMERAFUZZ";
+        cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
                                /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                                /*overrideToPortrait*/ false, /*forceSlowJpegMode*/ false,
-                               kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
+                               clientAttribution, /*devicePolicy*/0, &cameraDevice);
     } else {
+        clientAttribution.uid = mFDP->ConsumeIntegral<int8_t>();
+        clientAttribution.pid = mFDP->ConsumeIntegral<int8_t>();
+        clientAttribution.packageName = mFDP->ConsumeRandomLengthString(kMaxBytes).c_str();
         cameraService->connect(this, mFDP->ConsumeIntegral<int32_t>() /* cameraId */,
-                               mFDP->ConsumeRandomLengthString(kMaxBytes).c_str(),
-                               mFDP->ConsumeIntegral<int8_t>() /* clientUid */,
-                               mFDP->ConsumeIntegral<int8_t>() /* clientPid */,
                                /*targetSdkVersion*/ mFDP->ConsumeIntegral<int32_t>(),
                                /*overrideToPortrait*/ mFDP->ConsumeBool(),
-                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), kDefaultDeviceId,
+                               /*forceSlowJpegMode*/ mFDP->ConsumeBool(), clientAttribution,
                                /*devicePolicy*/0, &cameraDevice);
     }
 
@@ -165,13 +169,15 @@
     }
 
     int32_t cameraId = mFDP->ConsumeIntegral<int32_t>();
-    Camera::getNumberOfCameras(kDefaultDeviceId, /*devicePolicy*/0);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    Camera::getNumberOfCameras(clientAttribution, /*devicePolicy*/0);
     CameraInfo cameraInfo;
     cameraInfo.facing = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidFacing)
                                             : mFDP->ConsumeIntegral<int32_t>();
     cameraInfo.orientation = mFDP->ConsumeBool() ? mFDP->PickValueInArray(kValidOrientation)
                                                  : mFDP->ConsumeIntegral<int32_t>();
-    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, kDefaultDeviceId,
+    Camera::getCameraInfo(cameraId, /*overrideToPortrait*/false, clientAttribution,
                           /*devicePolicy*/0, &cameraInfo);
     mCamera->reconnect();
 
diff --git a/media/audio/aconfig/audio.aconfig b/media/audio/aconfig/audio.aconfig
index c3ae59c..9221c04 100644
--- a/media/audio/aconfig/audio.aconfig
+++ b/media/audio/aconfig/audio.aconfig
@@ -83,6 +83,14 @@
 }
 
 flag {
+    name: "ring_my_car"
+    namespace: "media_audio"
+    description:
+        "Incoming ringtones will not be muted based on ringer mode when connected to a car"
+    bug: "319515324"
+}
+
+flag {
     name: "ringer_mode_affects_alarm"
     namespace: "media_audio"
     description:
diff --git a/media/audio/aconfig/audioserver.aconfig b/media/audio/aconfig/audioserver.aconfig
index 6f76408..d1c6239 100644
--- a/media/audio/aconfig/audioserver.aconfig
+++ b/media/audio/aconfig/audioserver.aconfig
@@ -29,6 +29,14 @@
 }
 
 flag {
+    name: "fix_call_audio_patch"
+    namespace: "media_audio"
+    description:
+        "optimize creation and release of audio patches for call routing"
+    bug: "292492229"
+}
+
+flag {
     name: "fix_concurrent_playback_behavior_with_bit_perfect_client"
     namespace: "media_audio"
     description:
@@ -57,6 +65,22 @@
 }
 
 flag {
+    name: "portid_volume_management"
+    namespace: "media_audio"
+    description:
+        "Allows to manage volume by port id within audio flinger instead of legacy stream type."
+    bug: "317212590"
+}
+
+flag {
+    name: "power_stats"
+    namespace: "media_audio"
+    description:
+        "Add power stats tracking and management."
+    bug: "350114693"
+}
+
+flag {
     name: "use_bt_sco_for_media"
     namespace: "media_audio"
     description:
diff --git a/media/audio/aconfig/soundtrigger.aconfig b/media/audio/aconfig/soundtrigger.aconfig
index e61e6a3..5233119 100644
--- a/media/audio/aconfig/soundtrigger.aconfig
+++ b/media/audio/aconfig/soundtrigger.aconfig
@@ -6,9 +6,18 @@
 container: "system"
 
 flag {
-    name: "sound_trigger_generic_model_api"
+    name: "generic_model_api"
     is_exported: true
-    namespace: "soundtrigger"
+    namespace: "media_audio"
     description: "Feature flag for adding GenericSoundModel to SystemApi"
     bug: "339267254"
 }
+
+flag {
+    name: "manager_api"
+    is_exported: true
+    namespace: "media_audio"
+    description: "Feature flag for adding SoundTriggerManager API to SystemApi"
+    bug: "339267254"
+}
+
diff --git a/media/audioaidlconversion/AidlConversionNdk.cpp b/media/audioaidlconversion/AidlConversionNdk.cpp
index 9b14a5e..7ab616a 100644
--- a/media/audioaidlconversion/AidlConversionNdk.cpp
+++ b/media/audioaidlconversion/AidlConversionNdk.cpp
@@ -35,6 +35,7 @@
 
 using hardware::audio::common::PlaybackTrackMetadata;
 using hardware::audio::common::RecordTrackMetadata;
+using hardware::audio::common::SourceMetadata;
 using ::android::BAD_VALUE;
 using ::android::OK;
 
@@ -194,5 +195,16 @@
     return aidl;
 }
 
+// static
+ConversionResult<SourceMetadata>
+legacy2aidl_SourceMetadata(const std::vector<playback_track_metadata_v7_t>& legacy) {
+    SourceMetadata aidl;
+    aidl.tracks = VALUE_OR_RETURN(
+            convertContainer<std::vector<PlaybackTrackMetadata>>(
+                    legacy,
+                    legacy2aidl_playback_track_metadata_v7_PlaybackTrackMetadata));
+    return aidl;
+}
+
 }  // namespace android
 }  // aidl
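
The new legacy2aidl_SourceMetadata() helper wraps the existing per-track conversion into a single SourceMetadata. A minimal sketch of calling it; collectTracks() and the HAL hand-off are illustrative:

    std::vector<playback_track_metadata_v7_t> legacyTracks = collectTracks();
    auto result = ::aidl::android::legacy2aidl_SourceMetadata(legacyTracks);
    if (result.has_value()) {
        const auto& sourceMetadata = result.value();
        // forward sourceMetadata to the AIDL audio HAL stream
    }
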
diff --git a/media/audioaidlconversion/include/media/AidlConversionNdk.h b/media/audioaidlconversion/include/media/AidlConversionNdk.h
index 813a728..b5888b3 100644
--- a/media/audioaidlconversion/include/media/AidlConversionNdk.h
+++ b/media/audioaidlconversion/include/media/AidlConversionNdk.h
@@ -28,6 +28,7 @@
 
 #include <aidl/android/hardware/audio/common/PlaybackTrackMetadata.h>
 #include <aidl/android/hardware/audio/common/RecordTrackMetadata.h>
+#include <aidl/android/hardware/audio/common/SourceMetadata.h>
 #include <aidl/android/media/audio/common/AudioConfig.h>
 #include <media/AidlConversionUtil.h>
 
@@ -56,5 +57,8 @@
 ConversionResult<hardware::audio::common::RecordTrackMetadata>
 legacy2aidl_record_track_metadata_v7_RecordTrackMetadata(const record_track_metadata_v7& legacy);
 
+ConversionResult<hardware::audio::common::SourceMetadata>
+legacy2aidl_SourceMetadata(const std::vector<playback_track_metadata_v7_t>& legacy);
+
 }  // namespace android
 }  // namespace aidl
diff --git a/media/codec2/components/cmds/codec2.cpp b/media/codec2/components/cmds/codec2.cpp
index a17b04e..ca65aa2 100644
--- a/media/codec2/components/cmds/codec2.cpp
+++ b/media/codec2/components/cmds/codec2.cpp
@@ -46,7 +46,6 @@
 #include <media/stagefright/Utils.h>
 
 #include <gui/GLConsumer.h>
-#include <gui/IProducerListener.h>
 #include <gui/Surface.h>
 #include <gui/SurfaceComposerClient.h>
 
@@ -91,7 +90,7 @@
     std::shared_ptr<Listener> mListener;
     std::shared_ptr<C2Component> mComponent;
 
-    sp<IProducerListener> mProducerListener;
+    sp<SurfaceListener> mSurfaceListener;
 
     std::atomic_int mLinearPoolId;
 
@@ -138,7 +137,7 @@
 
 SimplePlayer::SimplePlayer()
     : mListener(new Listener(this)),
-      mProducerListener(new StubProducerListener),
+      mSurfaceListener(new StubSurfaceListener),
       mLinearPoolId(C2BlockPool::PLATFORM_START),
       mComposerClient(new SurfaceComposerClient) {
     CHECK_EQ(mComposerClient->initCheck(), (status_t)OK);
@@ -164,7 +163,7 @@
 
     mSurface = mControl->getSurface();
     CHECK(mSurface != nullptr);
-    mSurface->connect(NATIVE_WINDOW_API_CPU, mProducerListener);
+    mSurface->connect(NATIVE_WINDOW_API_CPU, mSurfaceListener);
 }
 
 SimplePlayer::~SimplePlayer() {
diff --git a/media/libaaudio/fuzzer/Android.bp b/media/libaaudio/fuzzer/Android.bp
index ba231c1..a1551f8 100644
--- a/media/libaaudio/fuzzer/Android.bp
+++ b/media/libaaudio/fuzzer/Android.bp
@@ -46,9 +46,9 @@
     ],
     static_libs: [
         "aaudio-aidl-cpp",
+        "audio-permission-aidl-cpp",
         "audioclient-types-aidl-cpp",
         "audioflinger-aidl-cpp",
-        "audio-permission-aidl-cpp",
         "audiopolicy-aidl-cpp",
         "audiopolicy-types-aidl-cpp",
         "av-types-aidl-cpp",
diff --git a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
index 8595308..255bd0f 100644
--- a/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
+++ b/media/libaaudio/src/legacy/AudioStreamLegacy.cpp
@@ -261,6 +261,11 @@
 
 void AudioStreamLegacy::onAudioDeviceUpdate(audio_io_handle_t /* audioIo */,
             audio_port_handle_t deviceId) {
+    // Ignore updates that report an unspecified deviceId; they carry no routing information.
+    if (deviceId == AAUDIO_UNSPECIFIED) {
+        ALOGE("%s() called with deviceId = AAUDIO_UNSPECIFIED, ignoring", __func__);
+        return;
+    }
     // Device routing is a common source of errors and DISCONNECTS.
     // Please leave this log in place. If there is a bug then this might
     // get called after the stream has been deleted so log before we
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 3f4fcfd..3602e94 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -322,23 +322,6 @@
     return NO_ERROR;
 }
 
-status_t AudioSystem::getStreamVolume(audio_stream_type_t stream, float* volume,
-                                      audio_io_handle_t output) {
-    if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger> af = get_audio_flinger();
-    if (af == 0) return PERMISSION_DENIED;
-    *volume = af->streamVolume(stream, output);
-    return NO_ERROR;
-}
-
-status_t AudioSystem::getStreamMute(audio_stream_type_t stream, bool* mute) {
-    if (uint32_t(stream) >= AUDIO_STREAM_CNT) return BAD_VALUE;
-    const sp<IAudioFlinger> af = get_audio_flinger();
-    if (af == 0) return PERMISSION_DENIED;
-    *mute = af->streamMute(stream);
-    return NO_ERROR;
-}
-
 status_t AudioSystem::setMode(audio_mode_t mode) {
     if (uint32_t(mode) >= AUDIO_MODE_CNT) return BAD_VALUE;
     const sp<IAudioFlinger> af = get_audio_flinger();
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index e2386ca..161c4d5 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -2401,12 +2401,14 @@
     int32_t flags = android_atomic_and(
         ~(CBLK_UNDERRUN | CBLK_LOOP_CYCLE | CBLK_LOOP_FINAL | CBLK_BUFFER_END), &mCblk->mFlags);
 
+    const bool isOffloaded = isOffloaded_l();
+    const bool isOffloadedOrDirect = isOffloadedOrDirect_l();
     // Check for track invalidation
     if (flags & CBLK_INVALID) {
         // for offloaded tracks restoreTrack_l() will just update the sequence and clear
         // AudioSystem cache. We should not exit here but after calling the callback so
         // that the upper layers can recreate the track
-        if (!isOffloadedOrDirect_l() || (mSequence == mObservedSequence)) {
+        if (!isOffloadedOrDirect || (mSequence == mObservedSequence)) {
             status_t status __unused = restoreTrack_l("processAudioBuffer");
             // FIXME unused status
             // after restoration, continue below to make sure that the loop and buffer events
@@ -2576,7 +2578,7 @@
         mObservedSequence = sequence;
         callback->onNewIAudioTrack();
         // for offloaded tracks, just wait for the upper layers to recreate the track
-        if (isOffloadedOrDirect()) {
+        if (isOffloadedOrDirect) {
             return NS_INACTIVE;
         }
     }
@@ -2664,7 +2666,7 @@
                 __func__, mPortId, mRemainingFrames, avail, audioBuffer.frameCount, nonContig, err);
         if (err != NO_ERROR) {
             if (err == TIMED_OUT || err == WOULD_BLOCK || err == -EINTR ||
-                    (isOffloaded() && (err == DEAD_OBJECT))) {
+                    (isOffloaded && (err == DEAD_OBJECT))) {
                 // FIXME bug 25195759
                 return 1000000;
             }
@@ -2750,7 +2752,7 @@
             // buffer size and skip the loop entirely.
 
             nsecs_t myns;
-            if (audio_has_proportional_frames(mFormat)) {
+            if (!isOffloaded && audio_has_proportional_frames(mFormat)) {
                 // time to wait based on buffer occupancy
                 const nsecs_t datans = mRemainingFrames <= avail ? 0 :
                         framesToNanoseconds(mRemainingFrames - avail, sampleRate, speed);
diff --git a/media/libaudioclient/IAudioFlinger.cpp b/media/libaudioclient/IAudioFlinger.cpp
index a707909..e0dca2d 100644
--- a/media/libaudioclient/IAudioFlinger.cpp
+++ b/media/libaudioclient/IAudioFlinger.cpp
@@ -350,34 +350,6 @@
     return statusTFromBinderStatus(mDelegate->setStreamMute(streamAidl, muted));
 }
 
-float AudioFlingerClientAdapter::streamVolume(audio_stream_type_t stream,
-                                              audio_io_handle_t output) const {
-    auto result = [&]() -> ConversionResult<float> {
-        AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
-                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-        int32_t outputAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_io_handle_t_int32_t(output));
-        float aidlRet;
-        RETURN_IF_ERROR(statusTFromBinderStatus(
-                mDelegate->streamVolume(streamAidl, outputAidl, &aidlRet)));
-        return aidlRet;
-    }();
-    // Failure is ignored.
-    return result.value_or(0.f);
-}
-
-bool AudioFlingerClientAdapter::streamMute(audio_stream_type_t stream) const {
-    auto result = [&]() -> ConversionResult<bool> {
-        AudioStreamType streamAidl = VALUE_OR_RETURN_STATUS(
-                legacy2aidl_audio_stream_type_t_AudioStreamType(stream));
-        bool aidlRet;
-        RETURN_IF_ERROR(statusTFromBinderStatus(
-                mDelegate->streamMute(streamAidl, &aidlRet)));
-        return aidlRet;
-    }();
-    // Failure is ignored.
-    return result.value_or(false);
-}
-
 status_t AudioFlingerClientAdapter::setMode(audio_mode_t mode) {
     AudioMode modeAidl = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_mode_t_AudioMode(mode));
     return statusTFromBinderStatus(mDelegate->setMode(modeAidl));
@@ -1040,23 +1012,6 @@
     return Status::fromStatusT(mDelegate->setStreamMute(streamLegacy, muted));
 }
 
-Status AudioFlingerServerAdapter::streamVolume(AudioStreamType stream, int32_t output,
-                                               float* _aidl_return) {
-    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
-    audio_io_handle_t outputLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_int32_t_audio_io_handle_t(output));
-    *_aidl_return = mDelegate->streamVolume(streamLegacy, outputLegacy);
-    return Status::ok();
-}
-
-Status AudioFlingerServerAdapter::streamMute(AudioStreamType stream, bool* _aidl_return) {
-    audio_stream_type_t streamLegacy = VALUE_OR_RETURN_BINDER(
-            aidl2legacy_AudioStreamType_audio_stream_type_t(stream));
-    *_aidl_return = mDelegate->streamMute(streamLegacy);
-    return Status::ok();
-}
-
 Status AudioFlingerServerAdapter::setMode(AudioMode mode) {
     audio_mode_t modeLegacy = VALUE_OR_RETURN_BINDER(aidl2legacy_AudioMode_audio_mode_t(mode));
     return Status::fromStatusT(mDelegate->setMode(modeLegacy));
diff --git a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
index 4f00f83..29de9c2 100644
--- a/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
+++ b/media/libaudioclient/aidl/android/media/IAudioFlingerService.aidl
@@ -94,13 +94,11 @@
     float getMasterBalance();
 
     /*
-     * Set/gets stream type state. This will probably be used by
+     * Set stream type state. This will probably be used by
      * the preference panel, mostly.
      */
     void setStreamVolume(AudioStreamType stream, float value, int /* audio_io_handle_t */ output);
     void setStreamMute(AudioStreamType stream, boolean muted);
-    float streamVolume(AudioStreamType stream, int /* audio_io_handle_t */ output);
-    boolean streamMute(AudioStreamType stream);
 
     // set audio mode.
     void setMode(AudioMode mode);
diff --git a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
index ddda8bb..73610a8 100644
--- a/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
+++ b/media/libaudioclient/aidl/android/media/OpenOutputRequest.aidl
@@ -19,6 +19,7 @@
 import android.media.AudioPortFw;
 import android.media.audio.common.AudioConfig;
 import android.media.audio.common.AudioConfigBase;
+import android.media.audio.common.AudioAttributes;
 
 /**
  * {@hide}
@@ -32,4 +33,5 @@
     AudioPortFw device;
     /** Bitmask, indexed by AudioOutputFlag. */
     int flags;
+    AudioAttributes attributes;
 }
diff --git a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
index dfdb4cf..4c94974 100644
--- a/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
+++ b/media/libaudioclient/fuzzer/audioflinger_fuzzer.cpp
@@ -519,12 +519,6 @@
     stream = getValue(&mFdp, kStreamtypes);
     AudioSystem::getOutputLatency(&latency, stream);
 
-    stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::getStreamVolume(stream, &volume, mFdp.ConsumeIntegral<int32_t>());
-
-    stream = getValue(&mFdp, kStreamtypes);
-    AudioSystem::getStreamMute(stream, &state);
-
     uint32_t samplingRate;
     AudioSystem::getSamplingRate(mFdp.ConsumeIntegral<int32_t>(), &samplingRate);
 
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 9cfd540..67b3dcd 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -124,15 +124,12 @@
     static status_t setMasterMute(bool mute);
     static status_t getMasterMute(bool* mute);
 
-    // set/get stream volume on specified output
+    // set stream volume on specified output
     static status_t setStreamVolume(audio_stream_type_t stream, float value,
                                     audio_io_handle_t output);
-    static status_t getStreamVolume(audio_stream_type_t stream, float* volume,
-                                    audio_io_handle_t output);
 
     // mute/unmute stream
     static status_t setStreamMute(audio_stream_type_t stream, bool mute);
-    static status_t getStreamMute(audio_stream_type_t stream, bool* mute);
 
     // set audio mode in audio hardware
     static status_t setMode(audio_mode_t mode);
diff --git a/media/libaudioclient/include/media/IAudioFlinger.h b/media/libaudioclient/include/media/IAudioFlinger.h
index 211fffa..667e9ae 100644
--- a/media/libaudioclient/include/media/IAudioFlinger.h
+++ b/media/libaudioclient/include/media/IAudioFlinger.h
@@ -229,10 +229,6 @@
                                     audio_io_handle_t output) = 0;
     virtual     status_t    setStreamMute(audio_stream_type_t stream, bool muted) = 0;
 
-    virtual     float       streamVolume(audio_stream_type_t stream,
-                                    audio_io_handle_t output) const = 0;
-    virtual     bool        streamMute(audio_stream_type_t stream) const = 0;
-
     // set audio mode
     virtual     status_t    setMode(audio_mode_t mode) = 0;
 
@@ -424,9 +420,6 @@
     status_t setStreamVolume(audio_stream_type_t stream, float value,
                              audio_io_handle_t output) override;
     status_t setStreamMute(audio_stream_type_t stream, bool muted) override;
-    float streamVolume(audio_stream_type_t stream,
-                       audio_io_handle_t output) const override;
-    bool streamMute(audio_stream_type_t stream) const override;
     status_t setMode(audio_mode_t mode) override;
     status_t setMicMute(bool state) override;
     bool getMicMute() const override;
@@ -549,8 +542,6 @@
             MASTER_MUTE = media::BnAudioFlingerService::TRANSACTION_masterMute,
             SET_STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_setStreamVolume,
             SET_STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_setStreamMute,
-            STREAM_VOLUME = media::BnAudioFlingerService::TRANSACTION_streamVolume,
-            STREAM_MUTE = media::BnAudioFlingerService::TRANSACTION_streamMute,
             SET_MODE = media::BnAudioFlingerService::TRANSACTION_setMode,
             SET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_setMicMute,
             GET_MIC_MUTE = media::BnAudioFlingerService::TRANSACTION_getMicMute,
@@ -673,9 +664,6 @@
     Status setStreamVolume(media::audio::common::AudioStreamType stream,
                            float value, int32_t output) override;
     Status setStreamMute(media::audio::common::AudioStreamType stream, bool muted) override;
-    Status streamVolume(media::audio::common::AudioStreamType stream,
-                        int32_t output, float* _aidl_return) override;
-    Status streamMute(media::audio::common::AudioStreamType stream, bool* _aidl_return) override;
     Status setMode(media::audio::common::AudioMode mode) override;
     Status setMicMute(bool state) override;
     Status getMicMute(bool* _aidl_return) override;
diff --git a/media/libaudioclient/tests/audioeffect_analyser.cpp b/media/libaudioclient/tests/audioeffect_analyser.cpp
index f4d37bc..199fb8b 100644
--- a/media/libaudioclient/tests/audioeffect_analyser.cpp
+++ b/media/libaudioclient/tests/audioeffect_analyser.cpp
@@ -62,6 +62,15 @@
 constexpr int kNPointFFT = 16384;
 constexpr float kBinWidth = (float)kSamplingFrequency / kNPointFFT;
 
+// frequency used to generate the test tone
+constexpr uint32_t kTestFrequency = 1400;
+
+// Tolerance of the audio gain difference in dB; 0.1 dB corresponds to an amplitude ratio of
+// 10^(0.1/20), about 1.0116, i.e. roughly a 1.16% difference in amplitude
+constexpr float kAudioGainDiffTolerancedB = .1f;
+
+const std::string kDataTempPath = "/data/local/tmp";
+
 const char* gPackageName = "AudioEffectAnalyser";
 
 static_assert(kPrimeDurationInSec + 2 * kNPointFFT / kSamplingFrequency < kCaptureDurationSec,
@@ -177,21 +186,30 @@
     return effect;
 }
 
-void computeFilterGainsAtTones(float captureDuration, int nPointFft, std::vector<int>& binOffsets,
-                               float* inputMag, float* gaindB, const char* res,
-                               audio_session_t sessionId) {
+void computeFilterGainsAtTones(float captureDuration, int nPointFft, std::vector<int> binOffsets,
+                               float* inputMag, float* gaindB, const std::string res,
+                               audio_session_t sessionId, const std::string res2 = "",
+                               audio_session_t sessionId2 = AUDIO_SESSION_NONE) {
     int totalFrameCount = captureDuration * kSamplingFrequency;
     auto output = pffft::AlignedVector<float>(totalFrameCount);
     auto fftOutput = pffft::AlignedVector<float>(nPointFft);
-    PlaybackEnv argsP;
-    argsP.mRes = std::string{res};
+    PlaybackEnv argsP, argsP2;
+    argsP.mRes = res;
     argsP.mSessionId = sessionId;
     CaptureEnv argsR;
     argsR.mCaptureDuration = captureDuration;
     std::thread playbackThread(&PlaybackEnv::play, &argsP);
+    std::optional<std::thread> playbackThread2;
+    if (res2 != "") {
+        argsP2 = {.mSessionId = sessionId2, .mRes = res2};
+        playbackThread2 = std::thread(&PlaybackEnv::play, &argsP2);
+    }
     std::thread captureThread(&CaptureEnv::capture, &argsR);
     captureThread.join();
     playbackThread.join();
+    if (playbackThread2 != std::nullopt) {
+        playbackThread2->join();
+    }
     ASSERT_EQ(OK, argsR.mStatus) << argsR.mMsg;
     ASSERT_EQ(OK, argsP.mStatus) << argsP.mMsg;
     ASSERT_FALSE(argsR.mDumpFileName.empty()) << "recorded not written to file";
@@ -210,7 +228,11 @@
         auto k = binOffsets[i];
         auto outputMag = sqrt((fftOutput[k * 2] * fftOutput[k * 2]) +
                               (fftOutput[k * 2 + 1] * fftOutput[k * 2 + 1]));
-        gaindB[i] = 20 * log10(outputMag / inputMag[i]);
+        if (inputMag == nullptr) {
+            gaindB[i] = 20 * log10(outputMag);
+        } else {
+            gaindB[i] = 20 * log10(outputMag / inputMag[i]);
+        }
     }
 }
 
@@ -282,7 +304,7 @@
         inputMag[i] = sqrt((fftInput[k * 2] * fftInput[k * 2]) +
                            (fftInput[k * 2 + 1] * fftInput[k * 2 + 1]));
     }
-    TemporaryFile tf("/data/local/tmp");
+    TemporaryFile tf(kDataTempPath);
     close(tf.release());
     std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
     fout.write((char*)input.data(), input.size() * sizeof(input[0]));
@@ -386,7 +408,7 @@
         inputMag[i] = sqrt((fftInput[k * 2] * fftInput[k * 2]) +
                            (fftInput[k * 2 + 1] * fftInput[k * 2 + 1]));
     }
-    TemporaryFile tf("/data/local/tmp");
+    TemporaryFile tf(kDataTempPath);
     close(tf.release());
     std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
     fout.write((char*)input.data(), input.size() * sizeof(input[0]));
@@ -396,7 +418,7 @@
     memset(gainWithOutFilter, 0, sizeof(gainWithOutFilter));
     ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(kCaptureDurationSec, kNPointFFT, binOffsets,
                                                       inputMag, gainWithOutFilter, tf.path,
-                                                      AUDIO_SESSION_OUTPUT_MIX));
+                                                      AUDIO_SESSION_NONE));
     float diffA = gainWithOutFilter[0] - gainWithOutFilter[1];
     float prevGain = -100.f;
     for (auto strength = 150; strength < 1000; strength += strengthSupported ? 150 : 1000) {
@@ -421,6 +443,56 @@
     }
 }
 
+// assert that a silent audio session with an effect attached does not override the output audio
+TEST(AudioEffectTest, SilentAudioEffectSessionNotOverrideOutput) {
+    audio_session_t sessionId =
+            (audio_session_t)AudioSystem::newAudioUniqueId(AUDIO_UNIQUE_ID_USE_SESSION);
+    sp<AudioEffect> bassboost = createEffect(SL_IID_BASSBOOST, sessionId);
+    if ((bassboost->descriptor().flags & EFFECT_FLAG_HW_ACC_MASK) != 0) {
+        GTEST_SKIP() << "effect processed output inaccessible, skipping test";
+    }
+    ASSERT_EQ(OK, bassboost->initCheck());
+    ASSERT_EQ(NO_ERROR, bassboost->setEnabled(true));
+
+    const auto bin = roundToFreqCenteredToFftBin(kBinWidth, kTestFrequency);
+    const int binIndex = std::get<0 /* index */>(bin);
+    const int binFrequency = std::get<1 /* freq */>(bin);
+
+    const int totalFrameCount = kSamplingFrequency * kPlayBackDurationSec;
+    // input for effect module
+    auto silentAudio = pffft::AlignedVector<float>(totalFrameCount);
+    auto input = pffft::AlignedVector<float>(totalFrameCount);
+    generateMultiTone({binFrequency}, kSamplingFrequency, kPlayBackDurationSec, kDefAmplitude,
+                      input.data(), totalFrameCount);
+    TemporaryFile tf(kDataTempPath);
+    close(tf.release());
+    std::ofstream fout(tf.path, std::ios::out | std::ios::binary);
+    fout.write((char*)input.data(), input.size() * sizeof(input[0]));
+    fout.close();
+
+    // play non-silent audio file on AUDIO_SESSION_NONE
+    float audioGain, audioPlusSilentEffectGain;
+    ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(kCaptureDurationSec, kNPointFFT, {binIndex},
+                                                      nullptr, &audioGain, tf.path,
+                                                      AUDIO_SESSION_NONE));
+    EXPECT_FALSE(std::isinf(audioGain)) << "output gain should not be -inf";
+
+    TemporaryFile silentFile(kDataTempPath);
+    close(silentFile.release());
+    std::ofstream fSilent(silentFile.path, std::ios::out | std::ios::binary);
+    fSilent.write((char*)silentAudio.data(), silentAudio.size() * sizeof(silentAudio[0]));
+    fSilent.close();
+    // play the non-silent audio file on AUDIO_SESSION_NONE and the silent audio on sessionId;
+    // expect the new output gain to be almost the same as in the previous playback
+    ASSERT_NO_FATAL_FAILURE(computeFilterGainsAtTones(
+            kCaptureDurationSec, kNPointFFT, {binIndex}, nullptr, &audioPlusSilentEffectGain,
+            tf.path, AUDIO_SESSION_NONE, silentFile.path, sessionId));
+    EXPECT_FALSE(std::isinf(audioPlusSilentEffectGain))
+            << "output might have been overwritten in effect accumulate mode";
+    EXPECT_NEAR(audioGain, audioPlusSilentEffectGain, kAudioGainDiffTolerancedB)
+            << " output gain should almost same with one more silent audio stream";
+}
+
 int main(int argc, char** argv) {
     android::ProcessState::self()->startThreadPool();
     ::testing::InitGoogleTest(&argc, argv);
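The kAudioGainDiffTolerancedB constant and the EXPECT_NEAR check added above compare gains in dB, relying on gain_dB = 20 * log10(amplitude ratio); a 0.1 dB tolerance therefore corresponds to an amplitude ratio of 10^(0.1/20), roughly 1.0116, i.e. about a 1.16% amplitude difference. A standalone check of that arithmetic (not part of the test binary):

#include <cmath>
#include <cstdio>

int main() {
    const float toleranceDb = 0.1f;
    // dB -> amplitude ratio
    const float ratio = std::pow(10.0f, toleranceDb / 20.0f);
    // amplitude ratio -> dB, recovering the original tolerance
    const float backToDb = 20.0f * std::log10(ratio);
    std::printf("ratio = %.4f (%.2f%% difference), back to dB = %.3f\n",
                ratio, (ratio - 1.0f) * 100.0f, backToDb);
    return 0;
}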
diff --git a/media/libaudioclient/tests/audiosystem_tests.cpp b/media/libaudioclient/tests/audiosystem_tests.cpp
index db998cd..742ca48 100644
--- a/media/libaudioclient/tests/audiosystem_tests.cpp
+++ b/media/libaudioclient/tests/audiosystem_tests.cpp
@@ -199,20 +199,6 @@
     EXPECT_EQ(origBalance, tstBalance);
 }
 
-TEST_F(AudioSystemTest, GetStreamVolume) {
-    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    float origStreamVol;
-    const auto [pbAudioIo, _] = mCbPlayback->getLastPortAndDevice();
-    EXPECT_EQ(NO_ERROR,
-              AudioSystem::getStreamVolume(AUDIO_STREAM_MUSIC, &origStreamVol, pbAudioIo));
-}
-
-TEST_F(AudioSystemTest, GetStreamMute) {
-    ASSERT_NO_FATAL_FAILURE(createPlaybackSession());
-    bool origMuteState;
-    EXPECT_EQ(NO_ERROR, AudioSystem::getStreamMute(AUDIO_STREAM_MUSIC, &origMuteState));
-}
-
 TEST_F(AudioSystemTest, StartAndStopAudioSource) {
     std::vector<struct audio_port_v7> ports;
     audio_port_config sourcePortConfig;
diff --git a/media/libaudiohal/impl/DeviceHalAidl.cpp b/media/libaudiohal/impl/DeviceHalAidl.cpp
index 976c1f6..4d780f5 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalAidl.cpp
@@ -24,6 +24,7 @@
 #include <aidl/android/hardware/audio/core/StreamDescriptor.h>
 #include <error/expected_utils.h>
 #include <media/AidlConversionCppNdk.h>
+#include <media/AidlConversionNdk.h>
 #include <media/AidlConversionNdkCpp.h>
 #include <media/AidlConversionUtil.h>
 #include <mediautils/TimeCheck.h>
@@ -60,6 +61,8 @@
 using aidl::android::hardware::audio::common::isBitPositionFlagSet;
 using aidl::android::hardware::audio::common::kDumpFromAudioServerArgument;
 using aidl::android::hardware::audio::common::RecordTrackMetadata;
+using aidl::android::hardware::audio::common::PlaybackTrackMetadata;
+using aidl::android::hardware::audio::common::SourceMetadata;
 using aidl::android::hardware::audio::core::sounddose::ISoundDose;
 using aidl::android::hardware::audio::core::AudioPatch;
 using aidl::android::hardware::audio::core::AudioRoute;
@@ -427,7 +430,8 @@
         audio_io_handle_t handle, audio_devices_t devices,
         audio_output_flags_t flags, struct audio_config* config,
         const char* address,
-        sp<StreamOutHalInterface>* outStream) {
+        sp<StreamOutHalInterface>* outStream,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
     ALOGD("%p %s::%s", this, getClassName().c_str(), __func__);
     TIME_CHECK();
     if (mModule == nullptr) return NO_INIT;
@@ -443,9 +447,12 @@
             ::aidl::android::legacy2aidl_audio_device_AudioDevice(devices, address));
     int32_t aidlOutputFlags = VALUE_OR_RETURN_STATUS(
             ::aidl::android::legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    SourceMetadata aidlMetadata = VALUE_OR_RETURN_STATUS(
+            ::aidl::android::legacy2aidl_SourceMetadata(sourceMetadata));
     AudioIoFlags aidlFlags = AudioIoFlags::make<AudioIoFlags::Tag::output>(aidlOutputFlags);
     AudioPortConfig mixPortConfig;
     AudioPatch aidlPatch;
+
     Hal2AidlMapper::Cleanups cleanups(mMapperAccessor);
     {
         std::lock_guard l(mLock);
@@ -475,6 +482,7 @@
     }
     args.bufferSizeFrames = aidlConfig.frameCount;
     args.eventCallback = eventCb;
+    args.sourceMetadata = aidlMetadata;
     ::aidl::android::hardware::audio::core::IModule::OpenOutputStreamReturn ret;
     RETURN_STATUS_IF_ERROR(statusTFromBinderStatus(mModule->openOutputStream(args, &ret)));
     StreamContextAidl context(ret.desc, isOffload);
diff --git a/media/libaudiohal/impl/DeviceHalAidl.h b/media/libaudiohal/impl/DeviceHalAidl.h
index 08455f1..6e823df 100644
--- a/media/libaudiohal/impl/DeviceHalAidl.h
+++ b/media/libaudiohal/impl/DeviceHalAidl.h
@@ -119,7 +119,9 @@
     // by releasing all references to the returned object.
     status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
                               audio_output_flags_t flags, struct audio_config* config,
-                              const char* address, sp<StreamOutHalInterface>* outStream) override;
+                              const char* address, sp<StreamOutHalInterface>* outStream,
+                              const std::vector<playback_track_metadata_v7_t>&
+                                                               sourceMetadata = {}) override;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/DeviceHalHidl.cpp b/media/libaudiohal/impl/DeviceHalHidl.cpp
index ea4258c..b48c7ed 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.cpp
+++ b/media/libaudiohal/impl/DeviceHalHidl.cpp
@@ -259,7 +259,8 @@
         audio_output_flags_t flags,
         struct audio_config *config,
         const char *address,
-        sp<StreamOutHalInterface> *outStream) {
+        sp<StreamOutHalInterface> *outStream,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata) {
     TIME_CHECK();
     if (mDevice == 0) return NO_INIT;
     DeviceAddress hidlDevice;
@@ -273,6 +274,18 @@
         return status;
     }
 
+#if MAJOR_VERSION == 4
+    ::android::hardware::audio::CORE_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#else
+    ::android::hardware::audio::common::COMMON_TYPES_CPP_VERSION::SourceMetadata hidlMetadata;
+#endif
+
+    if (status_t status = CoreUtils::sourceMetadataFromHalV7(
+                sourceMetadata, true /*ignoreNonVendorTags*/, &hidlMetadata);
+            status != OK) {
+        return status;
+    }
+
 #if !(MAJOR_VERSION == 7 && MINOR_VERSION == 1)
     //TODO: b/193496180 use spatializer flag at audio HAL when available
     if ((flags & AUDIO_OUTPUT_FLAG_SPATIALIZER) != 0) {
@@ -294,7 +307,7 @@
 #endif
             handle, hidlDevice, hidlConfig, hidlFlags,
 #if MAJOR_VERSION >= 4
-            {} /* metadata */,
+            hidlMetadata /* metadata */,
 #endif
             [&](Result r, const sp<::android::hardware::audio::CPP_VERSION::IStreamOut>& result,
                     const AudioConfig& suggestedConfig) {
diff --git a/media/libaudiohal/impl/DeviceHalHidl.h b/media/libaudiohal/impl/DeviceHalHidl.h
index 1362dab..5f3e08c 100644
--- a/media/libaudiohal/impl/DeviceHalHidl.h
+++ b/media/libaudiohal/impl/DeviceHalHidl.h
@@ -73,7 +73,9 @@
     // by releasing all references to the returned object.
     status_t openOutputStream(audio_io_handle_t handle, audio_devices_t devices,
                               audio_output_flags_t flags, struct audio_config* config,
-                              const char* address, sp<StreamOutHalInterface>* outStream) override;
+                              const char* address, sp<StreamOutHalInterface>* outStream,
+                              const std::vector<playback_track_metadata_v7_t>&
+                                                                sourceMetadata = {}) override;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
diff --git a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
index bdee7b6..e87993a 100644
--- a/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
+++ b/media/libaudiohal/impl/effectsAidlConversion/AidlConversionHapticGenerator.cpp
@@ -52,12 +52,17 @@
     switch (type) {
         case HG_PARAM_HAPTIC_INTENSITY: {
             int32_t id = 0, scale;
-            if (OK != param.readFromValue(&id) || OK != param.readFromValue(&scale)) {
+            float scaleFactor, adaptiveScaleFactor;
+            if (OK != param.readFromValue(&id) || OK != param.readFromValue(&scale) ||
+                OK != param.readFromValue(&scaleFactor) ||
+                OK != param.readFromValue(&adaptiveScaleFactor)) {
                 ALOGE("%s invalid intensity %s", __func__, param.toString().c_str());
                 return BAD_VALUE;
             }
-            HapticGenerator::HapticScale hpScale(
-                    {.id = id, .scale = (HapticGenerator::VibratorScale)(scale)});
+            HapticGenerator::HapticScale hpScale({.id = id,
+                                                  .scale = (HapticGenerator::VibratorScale)(scale),
+                                                  .scaleFactor = scaleFactor,
+                                                  .adaptiveScaleFactor = adaptiveScaleFactor});
             aidlParam = MAKE_SPECIFIC_PARAMETER(HapticGenerator, hapticGenerator, hapticScales,
                                                 {hpScale});
             break;
diff --git a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
index 7f6c1fb..3f16526 100644
--- a/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
+++ b/media/libaudiohal/include/media/audiohal/DeviceHalInterface.h
@@ -90,7 +90,8 @@
             audio_output_flags_t flags,
             struct audio_config *config,
             const char *address,
-            sp<StreamOutHalInterface> *outStream) = 0;
+            sp<StreamOutHalInterface> *outStream,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata = {}) = 0;
 
     // Creates and opens the audio hardware input stream. The stream is closed
     // by releasing all references to the returned object.
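A minimal caller-side sketch of the extended openOutputStream() above, showing the new trailing sourceMetadata argument; the field names follow the playback_track_metadata_v7 layout from system/audio.h as understood here (usage, content type and gain in the base struct, plus a channel mask), and the handle, device, flags and config values are placeholders:

// Sketch only: pass the initial client metadata when the stream is opened, so
// the HAL already knows the source usage/content type at open time.
std::vector<playback_track_metadata_v7_t> sourceMetadata(1);
sourceMetadata[0].base.usage = AUDIO_USAGE_MEDIA;
sourceMetadata[0].base.content_type = AUDIO_CONTENT_TYPE_MUSIC;
sourceMetadata[0].base.gain = 1.0f;
sourceMetadata[0].channel_mask = AUDIO_CHANNEL_OUT_STEREO;

sp<StreamOutHalInterface> outStream;
const status_t status = deviceHal->openOutputStream(
        ioHandle, AUDIO_DEVICE_OUT_SPEAKER, AUDIO_OUTPUT_FLAG_PRIMARY,
        &config, /*address=*/"", &outStream, sourceMetadata);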
diff --git a/media/libaudioprocessing/AudioMixer.cpp b/media/libaudioprocessing/AudioMixer.cpp
index f9ae2d4..7ef9ff2 100644
--- a/media/libaudioprocessing/AudioMixer.cpp
+++ b/media/libaudioprocessing/AudioMixer.cpp
@@ -585,7 +585,7 @@
     t->mPlaybackRate = AUDIO_PLAYBACK_RATE_DEFAULT;
     // haptic
     t->mHapticPlaybackEnabled = false;
-    t->mHapticScale = {/*level=*/os::HapticLevel::NONE };
+    t->mHapticScale = os::HapticScale::none();
     t->mHapticMaxAmplitude = NAN;
     t->mMixerHapticChannelMask = AUDIO_CHANNEL_NONE;
     t->mMixerHapticChannelCount = 0;
diff --git a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
index f60d616..258dca2 100644
--- a/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
+++ b/media/libeffects/hapticgenerator/EffectHapticGenerator.cpp
@@ -35,12 +35,15 @@
 #include <audio_utils/format.h>
 #include <audio_utils/safe_math.h>
 #include <system/audio.h>
+#include <system/audio_effects/audio_effects_utils.h>
 
 static constexpr float DEFAULT_RESONANT_FREQUENCY = 150.0f;
 static constexpr float DEFAULT_BSF_ZERO_Q = 8.0f;
 static constexpr float DEFAULT_BSF_POLE_Q = 4.0f;
 static constexpr float DEFAULT_DISTORTION_OUTPUT_GAIN = 1.5f;
 
+using android::effect::utils::EffectParamReader;
+
 // This is the only symbol that needs to be exported
 __attribute__ ((visibility ("default")))
 audio_effect_library_t AUDIO_EFFECT_LIBRARY_INFO_SYM = {
@@ -307,28 +310,40 @@
     return 0;
 }
 
-int HapticGenerator_SetParameter(struct HapticGeneratorContext *context,
-                                 int32_t param,
-                                 uint32_t size,
-                                 void *value) {
-    switch (param) {
+int HapticGenerator_SetParameter(struct HapticGeneratorContext *context, effect_param_t* param) {
+    if (param == nullptr) {
+        ALOGE("%s invalid effect_param_t is nullptr", __func__);
+        return -EINVAL;
+    }
+    int32_t paramType;
+    EffectParamReader reader(*param);
+    reader.readFromParameter(&paramType);
+
+    switch (paramType) {
     case HG_PARAM_HAPTIC_INTENSITY: {
-        if (value == nullptr || size != (uint32_t) (2 * sizeof(int) + sizeof(float))) {
+        if (param->vsize != (sizeof(int32_t) + sizeof(os::HapticScale))) {
+            ALOGE("%s invalid haptic intensity param size %s", __func__, reader.toString().c_str());
             return -EINVAL;
         }
-        const int id = *(int *) value;
-        const os::HapticLevel hapticLevel = static_cast<os::HapticLevel>(*((int *) value + 1));
-        const float adaptiveScaleFactor = (*((float *) value + 2));
-        const os::HapticScale hapticScale = {hapticLevel, adaptiveScaleFactor};
-        ALOGD("Updating haptic scale, hapticLevel=%d, adaptiveScaleFactor=%f",
-              static_cast<int>(hapticLevel), adaptiveScaleFactor);
+        int32_t id, scaleLevel;
+        float scaleFactor, adaptiveScaleFactor;
+        if (reader.readFromValue(&id) != OK || reader.readFromValue(&scaleLevel) != OK ||
+            reader.readFromValue(&scaleFactor) != OK ||
+            reader.readFromValue(&adaptiveScaleFactor) != OK) {
+            ALOGE("%s error reading haptic intensity %s", __func__, reader.toString().c_str());
+            return -EINVAL;
+        }
+        os::HapticScale hapticScale(static_cast<os::HapticLevel>(scaleLevel), scaleFactor,
+                                    adaptiveScaleFactor);
+        ALOGD("Updating haptic scale, %s", hapticScale.toString().c_str());
         if (hapticScale.isScaleMute()) {
             context->param.id2HapticScale.erase(id);
         } else {
             context->param.id2HapticScale.emplace(id, hapticScale);
         }
         context->param.maxHapticScale = hapticScale;
-        for (const auto&[id, scale] : context->param.id2HapticScale) {
+        for (const auto&[_, scale] : context->param.id2HapticScale) {
+            // TODO(b/360314386): update to use new scale factors
             if (scale.getLevel() > context->param.maxHapticScale.getLevel()) {
                 context->param.maxHapticScale = scale;
             }
@@ -336,12 +351,17 @@
         break;
     }
     case HG_PARAM_VIBRATOR_INFO: {
-        if (value == nullptr || size != 3 * sizeof(float)) {
+        if (param->vsize != (3 * sizeof(float))) {
+            ALOGE("%s invalid vibrator info param size %s", __func__, reader.toString().c_str());
             return -EINVAL;
         }
-        const float resonantFrequency = *(float*) value;
-        const float qFactor = *((float *) value + 1);
-        const float maxAmplitude = *((float *) value + 2);
+        float resonantFrequency, qFactor, maxAmplitude;
+        if (reader.readFromValue(&resonantFrequency) != OK ||
+            reader.readFromValue(&qFactor) != OK ||
+            reader.readFromValue(&maxAmplitude) != OK) {
+            ALOGE("%s error reading vibrator info %s", __func__, reader.toString().c_str());
+            return -EINVAL;
+        }
         context->param.resonantFrequency =
                 audio_utils::safe_isnan(resonantFrequency) ? DEFAULT_RESONANT_FREQUENCY
                                                            : resonantFrequency;
@@ -369,7 +389,7 @@
         HapticGenerator_Reset(context);
     } break;
     default:
-        ALOGW("Unknown param: %d", param);
+        ALOGW("Unknown param: %d", paramType);
         return -EINVAL;
     }
 
@@ -573,8 +593,7 @@
                 return -EINVAL;
             }
             effect_param_t *cmd = (effect_param_t *) cmdData;
-            *(int *) replyData = HapticGenerator_SetParameter(
-                    context, *(int32_t *) cmd->data, cmd->vsize, cmd->data + sizeof(int32_t));
+            *(int *) replyData = HapticGenerator_SetParameter(context, cmd);
         }
             break;
 
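With the rewrite above, the HG_PARAM_HAPTIC_INTENSITY value payload is read as four fields in order: int32 track id, int32 scale level, float scaleFactor, float adaptiveScaleFactor. A hedged sketch of how a caller might pack such a set-parameter buffer; the effect_param_t layout (status/psize/vsize followed by data) is the standard one, and the headers declaring effect_param_t and HG_PARAM_HAPTIC_INTENSITY are assumed to be included:

#include <cstdint>
#include <cstring>
#include <vector>

std::vector<uint8_t> packHapticIntensity(int32_t id, int32_t scaleLevel,
                                         float scaleFactor, float adaptiveScaleFactor) {
    const uint32_t psize = sizeof(int32_t);                          // one int32 parameter id
    const uint32_t vsize = 2 * sizeof(int32_t) + 2 * sizeof(float);  // the four value fields
    std::vector<uint8_t> buf(sizeof(effect_param_t) + psize + vsize);
    auto* p = reinterpret_cast<effect_param_t*>(buf.data());
    p->psize = psize;
    p->vsize = vsize;
    const int32_t paramType = HG_PARAM_HAPTIC_INTENSITY;
    memcpy(p->data, &paramType, sizeof(paramType));
    uint8_t* value = reinterpret_cast<uint8_t*>(p->data) + psize;  // psize is already 4-byte aligned
    memcpy(value, &id, sizeof(id));
    memcpy(value + 4, &scaleLevel, sizeof(scaleLevel));
    memcpy(value + 8, &scaleFactor, sizeof(scaleFactor));
    memcpy(value + 12, &adaptiveScaleFactor, sizeof(adaptiveScaleFactor));
    return buf;  // sent to the effect via the usual set-parameter command path
}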
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
index e040fec..535b886 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.cpp
@@ -36,7 +36,7 @@
     : EffectContext(statusDepth, common) {
     mState = HAPTIC_GENERATOR_STATE_UNINITIALIZED;
 
-    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
+    mParams.mMaxHapticScale = {.scale = HapticGenerator::VibratorScale::MUTE};
     mParams.mVibratorInfo.resonantFrequencyHz = DEFAULT_RESONANT_FREQUENCY;
     mParams.mVibratorInfo.qFactor = DEFAULT_BSF_ZERO_Q;
     mParams.mVibratorInfo.maxAmplitude = 0.f;
@@ -87,13 +87,17 @@
 RetCode HapticGeneratorContext::setHgHapticScales(
         const std::vector<HapticGenerator::HapticScale>& hapticScales) {
     for (auto hapticScale : hapticScales) {
-        mParams.mHapticScales.insert_or_assign(hapticScale.id, hapticScale.scale);
+        mParams.mHapticScales.insert_or_assign(hapticScale.id, hapticScale);
     }
-    mParams.mMaxVibratorScale = HapticGenerator::VibratorScale::MUTE;
+    mParams.mMaxHapticScale = {.scale = HapticGenerator::VibratorScale::MUTE};
     for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
-        mParams.mMaxVibratorScale = std::max(mParams.mMaxVibratorScale, vibratorScale);
+        // TODO(b/360314386): update to use new scale factors
+        if (vibratorScale.scale > mParams.mMaxHapticScale.scale) {
+            mParams.mMaxHapticScale = vibratorScale;
+        }
     }
-    LOG(INFO) << " HapticGenerator VibratorScale set to " << toString(mParams.mMaxVibratorScale);
+    LOG(INFO) << " HapticGenerator VibratorScale set to "
+              << toString(mParams.mMaxHapticScale.scale);
     return RetCode::SUCCESS;
 }
 
@@ -103,8 +107,8 @@
 
 std::vector<HapticGenerator::HapticScale> HapticGeneratorContext::getHgHapticScales() const {
     std::vector<HapticGenerator::HapticScale> result;
-    for (const auto& [id, vibratorScale] : mParams.mHapticScales) {
-        result.push_back({id, vibratorScale});
+    for (const auto& [_, hapticScale] : mParams.mHapticScales) {
+        result.push_back(hapticScale);
     }
     return result;
 }
@@ -150,7 +154,7 @@
         return status;
     }
 
-    if (mParams.mMaxVibratorScale == HapticGenerator::VibratorScale::MUTE) {
+    if (mParams.mMaxHapticScale.scale == HapticGenerator::VibratorScale::MUTE) {
         // Haptic channels are muted, not need to generate haptic data.
         return {STATUS_OK, samples, samples};
     }
@@ -177,7 +181,10 @@
             runProcessingChain(mInputBuffer.data(), mOutputBuffer.data(), mFrameCount);
     ::android::os::scaleHapticData(
             hapticOutBuffer, hapticSampleCount,
-            {static_cast<::android::os::HapticLevel>(mParams.mMaxVibratorScale)} /* scale */,
+            ::android::os::HapticScale(
+                    static_cast<::android::os::HapticLevel>(mParams.mMaxHapticScale.scale),
+                    mParams.mMaxHapticScale.scaleFactor,
+                    mParams.mMaxHapticScale.adaptiveScaleFactor),
             mParams.mVibratorInfo.maxAmplitude /* limit */);
 
     // For haptic data, the haptic playback thread will copy the data from effect input
@@ -351,11 +358,12 @@
     ss << "\t\t- mAudioChannelCount: " << param.mAudioChannelCount << '\n';
     ss << "\t\t- mHapticChannelSource: " << param.mHapticChannelSource[0] << ", "
        << param.mHapticChannelSource[1] << '\n';
-    ss << "\t\t- mMaxVibratorScale: " << ::android::internal::ToString(param.mMaxVibratorScale)
-       << '\n';
+    ss << "\t\t- mMaxHapticScale: " << ::android::internal::ToString(param.mMaxHapticScale.scale)
+       << ", scaleFactor=" << param.mMaxHapticScale.scaleFactor
+       << ", adaptiveScaleFactor=" << param.mMaxHapticScale.adaptiveScaleFactor << '\n';
     ss << "\t\t- mVibratorInfo: " << param.mVibratorInfo.toString() << '\n';
     for (const auto& it : param.mHapticScales)
-        ss << "\t\t\t" << it.first << ": " << toString(it.second) << '\n';
+        ss << "\t\t\t" << it.first << ": " << toString(it.second.scale) << '\n';
 
     return ss.str();
 }
diff --git a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
index cf38e47..37532f6 100644
--- a/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
+++ b/media/libeffects/hapticgenerator/aidl/HapticGeneratorContext.h
@@ -41,9 +41,9 @@
     int mHapticChannelCount;
     int mAudioChannelCount;
 
-    std::map<int, HapticGenerator::VibratorScale> mHapticScales;
+    std::map<int, HapticGenerator::HapticScale> mHapticScales;
     // max intensity will be used to scale haptic data.
-    HapticGenerator::VibratorScale mMaxVibratorScale;
+    HapticGenerator::HapticScale mMaxHapticScale;
 
     HapticGenerator::VibratorInformation mVibratorInfo;
 };
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
index 3d4e955..3c8b809 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp
@@ -102,6 +102,10 @@
     switch (pcmEncoding) {
     case kAudioEncodingPcmFloat:
         return AUDIO_FORMAT_PCM_FLOAT;
+    case kAudioEncodingPcm32bit:
+        return AUDIO_FORMAT_PCM_32_BIT;
+    case kAudioEncodingPcm24bitPacked:
+        return AUDIO_FORMAT_PCM_24_BIT_PACKED;
     case kAudioEncodingPcm16bit:
         return AUDIO_FORMAT_PCM_16_BIT;
     case kAudioEncodingPcm8bit:
@@ -2025,7 +2029,12 @@
     if (offloadingAudio()) {
         AString mime;
         CHECK(format->findString("mime", &mime));
-        status_t err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        status_t err = OK;
+        if (audioFormat == AUDIO_FORMAT_PCM_16_BIT) {
+            // The format message most likely carries no pcm-encoding here, so fall back to
+            // deriving the format from the mimetype.
+            err = mapMimeToAudioFormat(audioFormat, mime.c_str());
+        }
 
         if (err != OK) {
             ALOGE("Couldn't map mime \"%s\" to a valid "
diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
index e229844..26b8d0c 100644
--- a/media/libstagefright/CameraSource.cpp
+++ b/media/libstagefright/CameraSource.cpp
@@ -150,10 +150,15 @@
     int32_t cameraId, const std::string& clientName, uid_t clientUid, pid_t clientPid) {
 
     if (camera == 0) {
-        mCamera = Camera::connect(cameraId, clientName, clientUid, clientPid,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+        AttributionSourceState clientAttribution;
+        clientAttribution.pid = clientPid;
+        clientAttribution.uid = clientUid;
+        clientAttribution.deviceId = kDefaultDeviceId;
+        clientAttribution.packageName = clientName;
+
+        mCamera = Camera::connect(cameraId, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                /*forceSlowJpegMode*/false);
+                /*forceSlowJpegMode*/false, clientAttribution);
         if (mCamera == 0) return -EBUSY;
         mCameraFlags &= ~FLAGS_HOT_CAMERA;
     } else {
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 717106b..b380ade 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -840,7 +840,7 @@
     const sp<AMessage> mNotify;
 };
 
-class OnBufferReleasedListener : public ::android::BnProducerListener{
+class OnBufferReleasedListener : public ::android::SurfaceListener{
 private:
     uint32_t mGeneration;
     std::weak_ptr<BufferChannelBase> mBufferChannel;
@@ -871,6 +871,9 @@
         notifyBufferReleased();
     }
 
+    void onBuffersDiscarded([[maybe_unused]] const std::vector<sp<GraphicBuffer>>& buffers)
+        override { }
+
     void onBufferDetached([[maybe_unused]] int slot) override {
         notifyBufferReleased();
     }
@@ -6737,7 +6740,7 @@
             // to this surface after disconnect/connect, and those free frames would inherit the new
             // generation number. Disconnecting after setting a unique generation prevents this.
             nativeWindowDisconnect(surface.get(), "connectToSurface(reconnect)");
-            sp<IProducerListener> listener =
+            sp<SurfaceListener> listener =
                     new OnBufferReleasedListener(*generation, mBufferChannel);
             err = surfaceConnectWithListener(
                     surface, listener, "connectToSurface(reconnect-with-listener)");
diff --git a/media/libstagefright/SurfaceUtils.cpp b/media/libstagefright/SurfaceUtils.cpp
index 714e312..74432a6 100644
--- a/media/libstagefright/SurfaceUtils.cpp
+++ b/media/libstagefright/SurfaceUtils.cpp
@@ -335,7 +335,7 @@
 }
 
 status_t surfaceConnectWithListener(
-        const sp<Surface> &surface, sp<IProducerListener> listener, const char *reason) {
+        const sp<Surface> &surface, sp<SurfaceListener> listener, const char *reason) {
     ALOGD("connecting to surface %p, reason %s", surface.get(), reason);
 
     status_t err = surface->connect(NATIVE_WINDOW_API_MEDIA, listener);
diff --git a/media/libstagefright/include/media/stagefright/SurfaceUtils.h b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
index eccb413..882a5ab 100644
--- a/media/libstagefright/include/media/stagefright/SurfaceUtils.h
+++ b/media/libstagefright/include/media/stagefright/SurfaceUtils.h
@@ -27,7 +27,7 @@
 namespace android {
 
 struct HDRStaticInfo;
-class IProducerListener;
+class SurfaceListener;
 
 /**
  * Configures |nativeWindow| for given |width|x|height|, pixel |format|, |rotation| and |usage|.
@@ -45,7 +45,7 @@
 status_t nativeWindowConnect(ANativeWindow *surface, const char *reason);
 status_t nativeWindowDisconnect(ANativeWindow *surface, const char *reason);
 status_t surfaceConnectWithListener(const sp<Surface> &surface,
-        sp<IProducerListener> listener, const char *reason);
+        sp<SurfaceListener> listener, const char *reason);
 
 /**
  * Disable buffer dropping behavior of BufferQueue if target sdk of application
diff --git a/media/libstagefright/renderfright/gl/ProgramCache.cpp b/media/libstagefright/renderfright/gl/ProgramCache.cpp
index 350f0b7..ad6dd03 100644
--- a/media/libstagefright/renderfright/gl/ProgramCache.cpp
+++ b/media/libstagefright/renderfright/gl/ProgramCache.cpp
@@ -683,7 +683,7 @@
             fs << "uniform mat4 inputTransformMatrix;";
             fs << R"__SHADER__(
                 highp vec3 InputTransform(const highp vec3 color) {
-                    return clamp(vec3(inputTransformMatrix * vec4(color, 1.0)), 0.0, 1.0);
+                    return vec3(inputTransformMatrix * vec4(color, 1.0));
                 }
             )__SHADER__";
         } else {
diff --git a/media/psh_utils/Android.bp b/media/psh_utils/Android.bp
new file mode 100644
index 0000000..4662db8
--- /dev/null
+++ b/media/psh_utils/Android.bp
@@ -0,0 +1,47 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+// libraries that are included whole_static for test apps
+ndk_libs = [
+    "android.hardware.health-V3-ndk",
+    "android.hardware.power.stats-V1-ndk",
+]
+
+// Power, System, Health utils
+cc_library {
+    name: "libpshutils",
+    local_include_dirs: ["include"],
+    export_include_dirs: ["include"],
+    srcs: [
+        "HealthStats.cpp",
+        "HealthStatsProvider.cpp",
+        "PowerStats.cpp",
+        "PowerStatsCollector.cpp",
+        "PowerStatsProvider.cpp",
+    ],
+    shared_libs: [
+        "libaudioutils",
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    cflags: [
+        "-Wall",
+        "-Werror",
+        "-Wthread-safety",
+    ],
+    shared: {
+        shared_libs: ndk_libs,
+    },
+    static: {
+        whole_static_libs: ndk_libs,
+    },
+}
diff --git a/media/psh_utils/HealthStats.cpp b/media/psh_utils/HealthStats.cpp
new file mode 100644
index 0000000..5e767f6
--- /dev/null
+++ b/media/psh_utils/HealthStats.cpp
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <psh_utils/HealthStats.h>
+
+namespace android::media::psh_utils {
+
+template <typename T>
+const T& choose_voltage(const T& a, const T& b) {
+    return std::max(a, b);  // we use max here, could use avg.
+}
+
+std::string HealthStats::toString() const {
+    std::string result;
+    const float batteryVoltage = batteryVoltageMillivolts * 1e-3f;  // Volts
+    const float charge = batteryChargeCounterUah * (3600 * 1e-6);  // uAh -> Coulombs (A*s)
+    result.append("{Net Battery V: ")
+            .append(std::to_string(batteryVoltage))
+            .append(" J: ")
+            .append(std::to_string(charge))
+            .append("}");
+    return result;
+}
+
+std::string HealthStats::normalizedEnergy(double timeSec) const {
+    std::string result;
+    const float batteryVoltage = batteryVoltageMillivolts * 1e-3f;   // Volts
+    const float charge = -batteryChargeCounterUah * (3600 * 1e-6f);  // uAh -> Coulombs (A*s)
+    const float watts = charge * batteryVoltage / timeSec;
+    result.append("{Net Battery V: ")
+            .append(std::to_string(batteryVoltage))
+            .append(" J: ")
+            .append(std::to_string(charge))
+            .append(" W: ")
+            .append(std::to_string(watts))
+            .append("}");
+    return result;
+}
+
+HealthStats HealthStats::operator+=(const HealthStats& other) {
+    batteryVoltageMillivolts = choose_voltage(
+            batteryVoltageMillivolts, other.batteryVoltageMillivolts);
+    batteryFullChargeUah = std::max(batteryFullChargeUah, other.batteryFullChargeUah);
+    batteryChargeCounterUah += other.batteryChargeCounterUah;
+    return *this;
+}
+
+HealthStats HealthStats::operator-=(const HealthStats& other) {
+    batteryVoltageMillivolts = choose_voltage(
+            batteryVoltageMillivolts, other.batteryVoltageMillivolts);
+    batteryFullChargeUah = std::max(batteryFullChargeUah, other.batteryFullChargeUah);
+    batteryChargeCounterUah -= other.batteryChargeCounterUah;
+    return *this;
+}
+
+HealthStats HealthStats::operator+(const HealthStats& other) const {
+    HealthStats result = *this;
+    result += other;
+    return result;
+}
+
+HealthStats HealthStats::operator-(const HealthStats& other) const {
+    HealthStats result = *this;
+    result -= other;
+    return result;
+}
+
+} // namespace android::media::psh_utils
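HealthStats::normalizedEnergy() above converts the battery charge counter delta from uAh to Coulombs (Amp-seconds), then to Joules via the battery voltage, and finally to average Watts over the interval. A standalone check of that unit chain with made-up numbers:

#include <cstdio>

int main() {
    const double deltaChargeUah = -2000.0;  // counter dropped by 2000 uAh over the interval
    const double batteryMv = 4000.0;        // 4.0 V
    const double durationSec = 60.0;

    const double volts = batteryMv * 1e-3;
    const double coulombs = -deltaChargeUah * 3600e-6;  // uAh -> A*s (Coulombs)
    const double joules = coulombs * volts;             // J = C * V
    const double watts = joules / durationSec;          // average power over the interval
    std::printf("%.2f C, %.2f J, %.3f W\n", coulombs, joules, watts);
    return 0;
}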
diff --git a/media/psh_utils/HealthStatsProvider.cpp b/media/psh_utils/HealthStatsProvider.cpp
new file mode 100644
index 0000000..de72463
--- /dev/null
+++ b/media/psh_utils/HealthStatsProvider.cpp
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PowerStatsProvider.h"
+#include <aidl/android/hardware/health/IHealth.h>
+#include <android-base/logging.h>
+#include <android/binder_manager.h>
+#include <psh_utils/ServiceSingleton.h>
+
+using ::aidl::android::hardware::health::HealthInfo;
+using ::aidl::android::hardware::health::IHealth;
+
+namespace android::media::psh_utils {
+
+static auto getHealthService() {
+    return getServiceSingleton<IHealth>();
+}
+
+status_t HealthStatsDataProvider::fill(PowerStats* stat) const {
+    if (stat == nullptr) return BAD_VALUE;
+    HealthStats& stats = stat->health_stats;
+    auto healthService = getHealthService();
+    if (healthService == nullptr) {
+        return NO_INIT;
+    }
+    HealthInfo healthInfo;
+    if (!healthService->getHealthInfo(&healthInfo).isOk()) {
+        LOG(ERROR) << __func__ << ": unable to get health info";
+        return INVALID_OPERATION;
+    }
+
+    stats.batteryVoltageMillivolts = healthInfo.batteryVoltageMillivolts;
+    stats.batteryFullChargeUah = healthInfo.batteryFullChargeUah;
+    stats.batteryChargeCounterUah = healthInfo.batteryChargeCounterUah;
+    return NO_ERROR;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStats.cpp b/media/psh_utils/PowerStats.cpp
new file mode 100644
index 0000000..f8f87c5
--- /dev/null
+++ b/media/psh_utils/PowerStats.cpp
@@ -0,0 +1,283 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <audio_utils/clock.h>
+#include <psh_utils/PowerStats.h>
+
+namespace android::media::psh_utils {
+
+// Determine the best start time from a and b, which is
+// min(a, b) if both exist, otherwise the one that exists.
+template <typename T>
+const T& choose_best_start_time(const T& a, const T& b) {
+    if (a) {
+        return b ? std::min(a, b) : a;
+    } else {
+        return b;
+    }
+}
+
+// subtract two time differences.
+template <typename T, typename U>
+const T sub_time_diff(const T& diff_a, const T& diff_b, const U& abs_c, const U& abs_d) {
+    if (diff_a) {
+        return diff_b ? (diff_a - diff_b) : diff_a;
+    } else if (diff_b) {
+        return diff_b;
+    } else {  // no difference exists, use absolute time.
+        return abs_c - abs_d;
+    }
+}
+
+std::string PowerStats::Metadata::toString() const {
+    return std::string("start_time_since_boot_ms: ").append(
+                    std::to_string(start_time_since_boot_ms))
+            .append(" start_time_monotonic_ms: ").append(std::to_string(start_time_monotonic_ms))
+            .append(audio_utils_time_string_from_ns(start_time_epoch_ms * 1'000'000).time)
+            .append(" duration_ms: ").append(std::to_string(duration_ms))
+            .append(" duration_monotonic_ms: ").append(std::to_string(duration_monotonic_ms));
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator+=(const Metadata& other) {
+    start_time_since_boot_ms = choose_best_start_time(
+            start_time_since_boot_ms, other.start_time_since_boot_ms);
+    start_time_epoch_ms = choose_best_start_time(
+            start_time_epoch_ms, other.start_time_epoch_ms);
+    start_time_monotonic_ms = choose_best_start_time(
+            start_time_monotonic_ms, other.start_time_monotonic_ms);
+    duration_ms += other.duration_ms;
+    duration_monotonic_ms += other.duration_monotonic_ms;
+    return *this;
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator-=(const Metadata& other) {
+    // here we calculate duration, if it makes sense.
+    duration_ms = sub_time_diff(duration_ms, other.duration_ms,
+                                start_time_since_boot_ms, other.start_time_since_boot_ms);
+    duration_monotonic_ms = sub_time_diff(
+            duration_monotonic_ms, other.duration_monotonic_ms,
+            start_time_monotonic_ms, other.start_time_monotonic_ms);
+    start_time_since_boot_ms = choose_best_start_time(
+            start_time_since_boot_ms, other.start_time_since_boot_ms);
+    start_time_epoch_ms = choose_best_start_time(
+            start_time_epoch_ms, other.start_time_epoch_ms);
+    start_time_monotonic_ms = choose_best_start_time(
+            start_time_monotonic_ms, other.start_time_monotonic_ms);
+    return *this;
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator+(const Metadata& other) const {
+    Metadata result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats::Metadata PowerStats::Metadata::operator-(const Metadata& other) const {
+    Metadata result = *this;
+    result -= other;
+    return result;
+}
+
+std::string PowerStats::StateResidency::toString() const {
+    return std::string(entity_name).append(state_name)
+            .append(" ").append(std::to_string(time_ms))
+            .append(" ").append(std::to_string(entry_count));
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator+=(const StateResidency& other) {
+    if (entity_name.empty()) entity_name = other.entity_name;
+    if (state_name.empty()) state_name = other.state_name;
+    time_ms += other.time_ms;
+    entry_count += other.entry_count;
+    return *this;
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator-=(const StateResidency& other) {
+    if (entity_name.empty()) entity_name = other.entity_name;
+    if (state_name.empty()) state_name = other.state_name;
+    time_ms -= other.time_ms;
+    entry_count -= other.entry_count;
+    return *this;
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator+(
+        const StateResidency& other) const {
+    StateResidency result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats::StateResidency PowerStats::StateResidency::operator-(
+        const StateResidency& other) const {
+    StateResidency result = *this;
+    result -= other;
+    return result;
+}
+
+std::string PowerStats::RailEnergy::toString() const {
+    return std::string(subsystem_name)
+            .append(rail_name)
+            .append(" ")
+            .append(std::to_string(energy_uws));
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator+=(const RailEnergy& other) {
+    if (subsystem_name.empty()) subsystem_name = other.subsystem_name;
+    if (rail_name.empty()) rail_name = other.rail_name;
+    energy_uws += other.energy_uws;
+    return *this;
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator-=(const RailEnergy& other) {
+    if (subsystem_name.empty()) subsystem_name = other.subsystem_name;
+    if (rail_name.empty()) rail_name = other.rail_name;
+    energy_uws -= other.energy_uws;
+    return *this;
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator+(const RailEnergy& other) const {
+    RailEnergy result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats::RailEnergy PowerStats::RailEnergy::operator-(const RailEnergy& other) const {
+    RailEnergy result = *this;
+    result -= other;
+    return result;
+}
+
+std::string PowerStats::toString(const std::string& prefix) const {
+    std::string result;
+    result.append(prefix).append(metadata.toString()).append("\n");
+    result.append(prefix).append(health_stats.toString()).append("\n");
+    for (const auto &residency: power_entity_state_residency) {
+        result.append(prefix).append(residency.toString()).append("\n");
+    }
+    for (const auto &energy: rail_energy) {
+        result.append(prefix).append(energy.toString()).append("\n");
+    }
+    return result;
+}
+
+std::string PowerStats::normalizedEnergy(const std::string& prefix) const {
+    if (metadata.duration_ms == 0) return {};
+
+    std::string result(prefix);
+    result.append(audio_utils_time_string_from_ns(
+            metadata.start_time_epoch_ms * 1'000'000).time);
+    result.append(" duration_boottime: ")
+            .append(std::to_string(metadata.duration_ms * 1e-3f))
+            .append(" duration_monotonic: ")
+            .append(std::to_string(metadata.duration_monotonic_ms * 1e-3f))
+            .append("\n");
+    if (health_stats.isValid()) {
+        result.append(prefix)
+                .append(health_stats.normalizedEnergy(metadata.duration_ms * 1e-3f)).append("\n");
+    }
+
+    // energy_uws (uW-s) * recipTime = (energy in J) / (duration in s) = average W.
+    const float recipTime = 1e-3 / metadata.duration_ms;
+    int64_t total_energy = 0;
+    for (const auto& energy: rail_energy) {
+        total_energy += energy.energy_uws;
+        result.append(prefix).append(energy.subsystem_name)
+                .append(energy.rail_name)
+                .append(" ")
+                .append(std::to_string(energy.energy_uws * 1e-6))
+                .append(" ")
+                .append(std::to_string(energy.energy_uws * recipTime))
+                .append("\n");
+    }
+    if (total_energy != 0) {
+        result.append(prefix).append("total J and ave W: ")
+                .append(std::to_string(total_energy * 1e-6))
+                .append(" ")
+                .append(std::to_string(total_energy * recipTime))
+                .append("\n");
+    }
+    return result;
+}
+
+// seconds, joules, watts
+std::tuple<float, float, float> PowerStats::energyFrom(const std::string& railMatcher) const {
+    if (metadata.duration_ms == 0) return {};
+
+    // energy_uws is in microwatt-seconds (uJ); multiplying by recipTime yields average W.
+    const float recipTime = 1e-3 / metadata.duration_ms;
+    int64_t total_energy = 0;
+    for (const auto& energy: rail_energy) {
+        if (energy.subsystem_name.find(railMatcher) != std::string::npos
+                || energy.rail_name.find(railMatcher) != std::string::npos) {
+            total_energy += energy.energy_uws;
+        }
+    }
+    return {metadata.duration_ms * 1e-3, total_energy * 1e-6, total_energy * recipTime};
+}
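+
+// Worked example of the conversion above (illustrative numbers only): a rail reporting
+// energy_uws = 2'000'000 uW-s over duration_ms = 500 gives
+//   joules = 2'000'000 * 1e-6         = 2.0 J
+//   watts  = 2'000'000 * (1e-3 / 500) = 4.0 W
+// so energyFrom() returns {0.5, 2.0, 4.0} for that interval.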
+
+PowerStats PowerStats::operator+=(const PowerStats& other) {
+    metadata += other.metadata;
+    health_stats += other.health_stats;
+    if (power_entity_state_residency.empty()) {
+        power_entity_state_residency = other.power_entity_state_residency;
+    } else {
+        for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
+            power_entity_state_residency[i] += other.power_entity_state_residency[i];
+        }
+    }
+    if (rail_energy.empty()) {
+        rail_energy = other.rail_energy;
+    } else {
+        for (size_t i = 0; i < rail_energy.size(); ++i) {
+            rail_energy[i] += other.rail_energy[i];
+        }
+    }
+    return *this;
+}
+
+PowerStats PowerStats::operator-=(const PowerStats& other) {
+    metadata -= other.metadata;
+    health_stats -= other.health_stats;
+    if (power_entity_state_residency.empty()) {
+        power_entity_state_residency = other.power_entity_state_residency;
+    } else {
+        for (size_t i = 0; i < power_entity_state_residency.size(); ++i) {
+            power_entity_state_residency[i] -= other.power_entity_state_residency[i];
+        }
+    }
+    if (rail_energy.empty()) {
+        rail_energy = other.rail_energy;
+    } else {
+        for (size_t i = 0; i < rail_energy.size(); ++i) {
+            rail_energy[i] -= other.rail_energy[i];
+        }
+    }
+    return *this;
+}
+
+PowerStats PowerStats::operator+(const PowerStats& other) const {
+    PowerStats result = *this;
+    result += other;
+    return result;
+}
+
+PowerStats PowerStats::operator-(const PowerStats& other) const {
+    PowerStats result = *this;
+    result -= other;
+    return result;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStatsCollector.cpp b/media/psh_utils/PowerStatsCollector.cpp
new file mode 100644
index 0000000..e5bf2aa
--- /dev/null
+++ b/media/psh_utils/PowerStatsCollector.cpp
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <android-base/logging.h>
+#include <psh_utils/PowerStatsCollector.h>
+#include "PowerStatsProvider.h"
+#include <utils/Timers.h>
+
+namespace android::media::psh_utils {
+
+PowerStatsCollector::PowerStatsCollector() {
+    addProvider(std::make_unique<PowerEntityResidencyDataProvider>());
+    addProvider(std::make_unique<RailEnergyDataProvider>());
+    addProvider(std::make_unique<HealthStatsDataProvider>());
+}
+
+/* static */
+PowerStatsCollector& PowerStatsCollector::getCollector() {
+    [[clang::no_destroy]] static PowerStatsCollector psc;
+    return psc;
+}
+
+std::shared_ptr<const PowerStats> PowerStatsCollector::getStats(int64_t toleranceNs) {
+    // Check if there is a cached PowerStats result available.
+    // As toleranceNs may be different between callers, it may be that some callers
+    // are blocked on mMutexExclusiveFill for a new stats result, while other callers
+    // may find the current cached result acceptable (within toleranceNs).
+    if (toleranceNs > 0) {
+        auto result = checkLastStats(toleranceNs);
+        if (result) return result;
+    }
+
+    // Take the mMutexExclusiveFill to ensure only one thread is filling.
+    std::lock_guard lg1(mMutexExclusiveFill);
+    // As obtaining a new PowerStats snapshot might take some time,
+    // check again to see if another waiting thread filled the cached result for us.
+    if (toleranceNs > 0) {
+        auto result = checkLastStats(toleranceNs);
+        if (result) return result;
+    }
+    auto result = std::make_shared<PowerStats>();
+    (void)fill(result.get());
+    std::lock_guard lg2(mMutex);
+    mLastFetchNs = systemTime(SYSTEM_TIME_BOOTTIME);
+    mLastFetchStats = result;
+    return result;
+}
+
+std::shared_ptr<const PowerStats> PowerStatsCollector::checkLastStats(int64_t toleranceNs) const {
+    if (toleranceNs > 0) {
+        // see if we can return an old result.
+        std::lock_guard lg(mMutex);
+        if (mLastFetchStats && systemTime(SYSTEM_TIME_BOOTTIME) - mLastFetchNs < toleranceNs) {
+            return mLastFetchStats;
+        }
+    }
+    return {};
+}
+
+void PowerStatsCollector::addProvider(std::unique_ptr<PowerStatsProvider>&& powerStatsProvider) {
+    mPowerStatsProviders.emplace_back(std::move(powerStatsProvider));
+}
+
+int PowerStatsCollector::fill(PowerStats* stats) const {
+    if (!stats) {
+        LOG(ERROR) << __func__ << ": bad args; stats is null";
+        return 1;
+    }
+
+    for (const auto& provider : mPowerStatsProviders) {
+        (void) provider->fill(stats); // on error, we continue to proceed.
+    }
+
+    // boot time follows wall clock time, but starts from boot.
+    stats->metadata.start_time_since_boot_ms = systemTime(SYSTEM_TIME_BOOTTIME) / 1'000'000;
+
+    // wall clock time
+    stats->metadata.start_time_epoch_ms = systemTime(SYSTEM_TIME_REALTIME) / 1'000'000;
+
+    // monotonic time follows boot time, but does not include any time suspended.
+    stats->metadata.start_time_monotonic_ms = systemTime() / 1'000'000;
+    return 0;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStatsProvider.cpp b/media/psh_utils/PowerStatsProvider.cpp
new file mode 100644
index 0000000..112c323
--- /dev/null
+++ b/media/psh_utils/PowerStatsProvider.cpp
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "PowerStatsProvider.h"
+#include <aidl/android/hardware/power/stats/IPowerStats.h>
+#include <android-base/logging.h>
+#include <psh_utils/ServiceSingleton.h>
+#include <unordered_map>
+
+using ::aidl::android::hardware::power::stats::IPowerStats;
+
+namespace android::media::psh_utils {
+
+static auto getPowerStatsService() {
+    return getServiceSingleton<IPowerStats>();
+}
+
+status_t RailEnergyDataProvider::fill(PowerStats *stat) const {
+    if (stat == nullptr) return BAD_VALUE;
+    auto powerStatsService = getPowerStatsService();
+    if (powerStatsService == nullptr) {
+        return NO_INIT;
+    }
+
+    std::unordered_map<int32_t, ::aidl::android::hardware::power::stats::Channel> channelMap;
+    {
+        std::vector<::aidl::android::hardware::power::stats::Channel> channels;
+        if (!powerStatsService->getEnergyMeterInfo(&channels).isOk()) {
+            LOG(ERROR) << "unable to get energy meter info";
+            return INVALID_OPERATION;
+        }
+        for (auto& channel : channels) {
+            channelMap.emplace(channel.id, std::move(channel));
+        }
+    }
+
+    std::vector<::aidl::android::hardware::power::stats::EnergyMeasurement> measurements;
+    if (!powerStatsService->readEnergyMeter({}, &measurements).isOk()) {
+        LOG(ERROR) << "unable to get energy measurements";
+        return INVALID_OPERATION;
+    }
+
+    for (const auto& measurement : measurements) {
+        stat->rail_energy.emplace_back(
+            channelMap.at(measurement.id).subsystem,
+            channelMap.at(measurement.id).name,
+            measurement.energyUWs);
+    }
+
+    // Sort entries first by subsystem_name, then by rail_name.
+    // Sorting is needed to make interval processing efficient.
+    std::sort(stat->rail_energy.begin(), stat->rail_energy.end(),
+              [](const auto& a, const auto& b) {
+                  if (a.subsystem_name != b.subsystem_name) {
+                      return a.subsystem_name < b.subsystem_name;
+                  }
+                  return a.rail_name < b.rail_name;
+              });
+
+    return NO_ERROR;
+}
+
+status_t PowerEntityResidencyDataProvider::fill(PowerStats* stat) const {
+    if (stat == nullptr) return BAD_VALUE;
+    auto powerStatsService = getPowerStatsService();
+    if (powerStatsService == nullptr) {
+        return NO_INIT;
+    }
+
+    // these are based on entityId
+    std::unordered_map<int32_t, std::string> entityNames;
+    std::unordered_map<int32_t, std::unordered_map<int32_t, std::string>> stateNames;
+    std::vector<int32_t> powerEntityIds; // ids to query; if empty, all entities are reported
+
+    {
+        std::vector<::aidl::android::hardware::power::stats::PowerEntity> entities;
+        if (!powerStatsService->getPowerEntityInfo(&entities).isOk()) {
+            LOG(ERROR) << __func__ << ": unable to get entity info";
+            return INVALID_OPERATION;
+        }
+
+        std::vector<std::string> powerEntityNames;
+        for (const auto& entity : entities) {
+            std::unordered_map<int32_t, std::string> states;
+            for (const auto& state : entity.states) {
+                states.emplace(state.id, state.name);
+            }
+
+            if (std::find(powerEntityNames.begin(), powerEntityNames.end(), entity.name) !=
+                powerEntityNames.end()) {
+                powerEntityIds.emplace_back(entity.id);
+            }
+            entityNames.emplace(entity.id, std::move(entity.name));
+            stateNames.emplace(entity.id, std::move(states));
+        }
+    }
+
+    std::vector<::aidl::android::hardware::power::stats::StateResidencyResult> results;
+    if (!powerStatsService->getStateResidency(powerEntityIds, &results).isOk()) {
+        LOG(ERROR) << __func__ << ": Unable to get state residency";
+        return INVALID_OPERATION;
+    }
+
+    for (const auto& result : results) {
+        for (const auto& curStateResidency : result.stateResidencyData) {
+            stat->power_entity_state_residency.emplace_back(
+                    entityNames.at(result.id),
+                    stateNames.at(result.id).at(curStateResidency.id),
+                    static_cast<uint64_t>(curStateResidency.totalTimeInStateMs),
+                    static_cast<uint64_t>(curStateResidency.totalStateEntryCount));
+        }
+    }
+
+    // Sort entries first by entity_name, then by state_name.
+    // Sorting is needed to make interval processing efficient.
+    std::sort(stat->power_entity_state_residency.begin(),
+              stat->power_entity_state_residency.end(),
+              [](const auto& a, const auto& b) {
+                  if (a.entity_name != b.entity_name) {
+                      return a.entity_name < b.entity_name;
+                  }
+                  return a.state_name < b.state_name;
+              });
+    return NO_ERROR;
+}
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/PowerStatsProvider.h b/media/psh_utils/PowerStatsProvider.h
new file mode 100644
index 0000000..c3888ac
--- /dev/null
+++ b/media/psh_utils/PowerStatsProvider.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <psh_utils/PowerStatsCollector.h>
+
+namespace android::media::psh_utils {
+
+class RailEnergyDataProvider : public PowerStatsProvider {
+public:
+    status_t fill(PowerStats* stat) const override;
+};
+
+class PowerEntityResidencyDataProvider : public PowerStatsProvider {
+public:
+    status_t fill(PowerStats* stat) const override;
+};
+
+class HealthStatsDataProvider : public PowerStatsProvider {
+public:
+    status_t fill(PowerStats* stat) const override;
+};
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/benchmarks/Android.bp b/media/psh_utils/benchmarks/Android.bp
new file mode 100644
index 0000000..20efaa9
--- /dev/null
+++ b/media/psh_utils/benchmarks/Android.bp
@@ -0,0 +1,51 @@
+package {
+    default_team: "trendy_team_android_media_audio_framework",
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_benchmark {
+    name: "audio_powerstats_benchmark",
+
+    srcs: ["audio_powerstats_benchmark.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libpshutils",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+}
+
+cc_benchmark {
+    name: "audio_powerstatscollector_benchmark",
+
+    srcs: ["audio_powerstatscollector_benchmark.cpp"],
+    cflags: [
+        "-Wall",
+        "-Werror",
+    ],
+    static_libs: [
+        "libaudioutils",
+        "libpshutils",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+}
diff --git a/media/psh_utils/benchmarks/audio_powerstats_benchmark.cpp b/media/psh_utils/benchmarks/audio_powerstats_benchmark.cpp
new file mode 100644
index 0000000..d3f815c
--- /dev/null
+++ b/media/psh_utils/benchmarks/audio_powerstats_benchmark.cpp
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "audio_powerstat_benchmark"
+#include <cutils/properties.h>
+#include <utils/Log.h>
+
+#include <psh_utils/PerformanceFixture.h>
+
+#include <algorithm>
+#include <android-base/strings.h>
+#include <random>
+#include <thread>
+#include <vector>
+
+float result = 0;
+
+using android::media::psh_utils::CoreClass;
+using android::media::psh_utils::CORE_LITTLE;
+using android::media::psh_utils::CORE_MID;
+using android::media::psh_utils::CORE_BIG;
+
+enum Direction {
+    DIRECTION_FORWARD,
+    DIRECTION_BACKWARD,
+    DIRECTION_RANDOM,
+};
+
+std::string toString(Direction direction) {
+    switch (direction) {
+        case DIRECTION_FORWARD: return "DIRECTION_FORWARD";
+        case DIRECTION_BACKWARD: return "DIRECTION_BACKWARD";
+        case DIRECTION_RANDOM: return "DIRECTION_RANDOM";
+        default: return "DIRECTION_UNKNOWN";
+    }
+}
+
+enum Content {
+    CONTENT_ZERO,
+    CONTENT_RANDOM,
+};
+
+std::string toString(Content content) {
+    switch (content) {
+        case CONTENT_ZERO: return "CONTENT_ZERO";
+        case CONTENT_RANDOM: return "CONTENT_RANDOM";
+        default: return "CONTENT_UNKNOWN";
+    }
+}
+
+class MemoryFixture : public android::media::psh_utils::PerformanceFixture {
+public:
+    void SetUp(benchmark::State& state) override {
+        mCount = state.range(0) / (sizeof(uint32_t) + sizeof(float));
+        state.SetComplexityN(mCount * 2);  // 2 accesses per iteration.
+
+        // create src distribution
+        mSource.resize(mCount);
+        const auto content = static_cast<Content>(state.range(3));
+        if (content == CONTENT_RANDOM) {
+            std::minstd_rand gen(mCount);
+            std::uniform_real_distribution<float> dis(-1.f, 1.f);
+            for (size_t i = 0; i < mCount; i++) {
+                mSource[i] = dis(gen);
+            }
+        }
+
+        // create direction
+        mIndex.resize(mCount);
+        const auto direction = static_cast<Direction>(state.range(2));
+        switch (direction) {
+            case DIRECTION_BACKWARD:
+                for (size_t i = 0; i < mCount; i++) {
+                    mIndex[i] = mCount - 1 - i;  // traverse in reverse order
+                }
+                break;
+            case DIRECTION_FORWARD:
+            case DIRECTION_RANDOM:
+                for (size_t i = 0; i < mCount; i++) {
+                    mIndex[i] = i;  // ascending order; shuffled below for DIRECTION_RANDOM
+                }
+                if (direction == DIRECTION_RANDOM) {
+                    std::random_device rd;
+                    std::mt19937 g(rd());
+                    std::shuffle(mIndex.begin(), mIndex.end(), g);
+                }
+                break;
+        }
+
+        // set up the profiler
+        const auto coreClass = static_cast<CoreClass>(state.range(1));
+
+        // It would be best if we could override SetName() but it is too late at this point,
+        // so we set the state label here for clarity.
+        state.SetLabel(toString(coreClass).append("/")
+            .append(toString(direction)).append("/")
+            .append(toString(content)));
+
+        if (property_get_bool("persist.audio.benchmark_profile", false)) {
+            startProfiler(coreClass);
+        }
+    }
+    size_t mCount = 0;
+    std::vector<uint32_t> mIndex;
+    std::vector<float> mSource;
+};
+
+BENCHMARK_DEFINE_F(MemoryFixture, CacheAccess)(benchmark::State &state) {
+    float accum = 0;
+    while (state.KeepRunning()) {
+        for (size_t i = 0; i < mCount; ++i) {
+            accum += mSource[mIndex[i]];
+        }
+        benchmark::ClobberMemory();
+    }
+    result += accum; // not optimized
+}
+
+BENCHMARK_REGISTER_F(MemoryFixture, CacheAccess)->ArgsProduct({
+    benchmark::CreateRange(64, 64<<20, /* multi = */2),
+    {CORE_LITTLE, CORE_MID, CORE_BIG},
+    {DIRECTION_FORWARD, DIRECTION_RANDOM},
+    {CONTENT_RANDOM},
+});
+
+BENCHMARK_MAIN();
diff --git a/media/psh_utils/benchmarks/audio_powerstatscollector_benchmark.cpp b/media/psh_utils/benchmarks/audio_powerstatscollector_benchmark.cpp
new file mode 100644
index 0000000..9e581bc
--- /dev/null
+++ b/media/psh_utils/benchmarks/audio_powerstatscollector_benchmark.cpp
@@ -0,0 +1,52 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "audio_token_benchmark"
+#include <utils/Log.h>
+
+#include <psh_utils/PowerStatsCollector.h>
+
+#include <benchmark/benchmark.h>
+
+/*
+ Pixel 8 Pro
+------------------------------------------------------------------------------------------
+ Benchmark                            Time                      CPU            Iterations
+------------------------------------------------------------------------------------------
+audio_powerstatscollector_benchmark:
+  #BM_StatsToleranceMs/0      1.2005660120994434E8 ns            2532739.72 ns          100
+  #BM_StatsToleranceMs/50        1281.095987079007 ns     346.0322183913503 ns      2022168
+  #BM_StatsToleranceMs/100       459.9668862534226 ns    189.47902626735942 ns      2891307
+  #BM_StatsToleranceMs/200       233.8438662484292 ns    149.84041813854736 ns      4407343
+  #BM_StatsToleranceMs/500      184.42197142314103 ns    144.86896036787098 ns      7295167
+*/
+
+// We check how expensive it is to query stats depending on the
+// tolerance allowed for reusing cached values.
+// A tolerance of 0 means we always fetch fresh stats.
+static void BM_StatsToleranceMs(benchmark::State& state) {
+    auto& collector = android::media::psh_utils::PowerStatsCollector::getCollector();
+    const int64_t toleranceNs = state.range(0) * 1'000'000;
+    while (state.KeepRunning()) {
+        collector.getStats(toleranceNs);
+        benchmark::ClobberMemory();
+    }
+}
+
+// Here we test various time tolerances (given in milliseconds).
+BENCHMARK(BM_StatsToleranceMs)->Arg(0)->Arg(50)->Arg(100)->Arg(200)->Arg(500);
+
+BENCHMARK_MAIN();
diff --git a/media/psh_utils/include/psh_utils/HealthStats.h b/media/psh_utils/include/psh_utils/HealthStats.h
new file mode 100644
index 0000000..d7a8d1a
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/HealthStats.h
@@ -0,0 +1,58 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <string>
+#include <tuple>
+
+namespace android::media::psh_utils {
+
+// From hardware/interfaces/health/aidl/android/hardware/health/HealthInfo.aidl
+
+struct HealthStats {
+    /**
+     * Instantaneous battery voltage in millivolts (mV).
+     *
+     * Historically, the unit of this field was microvolts (µV), but all
+     * clients and implementations use millivolts in practice, making that
+     * the de facto standard.
+     */
+    double batteryVoltageMillivolts;
+    /**
+     * Battery charge value when it is considered to be "full" in µA-h
+     */
+    double batteryFullChargeUah;
+    /**
+     * Instantaneous battery capacity in µA-h
+     */
+    double batteryChargeCounterUah;
+
+    std::string normalizedEnergy(double time) const;
+
+    bool isValid() const { return batteryVoltageMillivolts > 0; }
+
+    // Returns {seconds, joules, watts} from battery counters
+    std::tuple<float, float, float> energyFrom(const std::string& s) const;
+    std::string toString() const;
+
+    HealthStats operator+=(const HealthStats& other);
+    HealthStats operator-=(const HealthStats& other);
+    HealthStats operator+(const HealthStats& other) const;
+    HealthStats operator-(const HealthStats& other) const;
+    bool operator==(const HealthStats& other) const = default;
+};
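+
+// Charge-to-energy arithmetic for these counters (illustrative): 1 uAh == 3.6e-3 C,
+// so a charge delta of 1000 uAh at a nominal 4000 mV corresponds to roughly
+// 1000 * 3.6e-3 C * 4.0 V = 14.4 J.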
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/PerformanceFixture.h b/media/psh_utils/include/psh_utils/PerformanceFixture.h
new file mode 100644
index 0000000..092a508
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PerformanceFixture.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <audio_utils/threads.h>
+#include <benchmark/benchmark.h>
+#include <psh_utils/PowerStats.h>
+#include <psh_utils/PowerStatsCollector.h>
+
+#include <future>
+
+namespace android::media::psh_utils {
+
+enum CoreClass {
+    CORE_LITTLE,
+    CORE_MID,
+    CORE_BIG,
+};
+
+inline std::string toString(CoreClass coreClass) {
+    switch (coreClass) {
+        case CORE_LITTLE: return "LITTLE";
+        case CORE_MID: return "MID";
+        case CORE_BIG: return "BIG";
+        default: return "UNKNOWN";
+    }
+}
+
+/**
+ * A benchmark fixture is used to specify benchmarks that have a custom SetUp() and
+ * TearDown().  This is **required** for performance testing, as a typical benchmark
+ * method **may be called several times** during a run.
+ *
+ * A fixture ensures that SetUp(), TearDown(), and the resulting statistics accumulation
+ * are done only once.  Note: BENCHMARK(BM_func)->Setup(DoSetup)->Teardown(DoTeardown)
+ * does something similar, but it requires some singleton to hold the state properly.
+ */
+class PerformanceFixture : public benchmark::Fixture {
+public:
+    // call this to start the profiling
+    virtual void startProfiler(CoreClass coreClass) {
+        mCores = android::audio_utils::get_number_cpus();
+        if (mCores == 0) return;
+        mCoreClass = coreClass;
+        std::array<unsigned, 3> coreSelection{0U, mCores / 2 + 1, mCores - 1};
+        mCore = coreSelection[std::min((size_t)coreClass, std::size(coreSelection) - 1)];
+
+        auto& collector = android::media::psh_utils::PowerStatsCollector::getCollector();
+        mStartStats = collector.getStats();
+
+        const pid_t tid = gettid();  // this thread
+        (void)tid;  // only used if the priority adjustment below is enabled.
+
+        // Possibly change priority to improve benchmarking
+        // android::audio_utils::set_thread_priority(tid, 98);
+
+        android::audio_utils::set_thread_affinity(0 /* pid */, 1 << mCore);
+    }
+
+    void TearDown(benchmark::State &state) override {
+        const auto N = state.complexity_length_n();
+        state.counters["N"] = benchmark::Counter(N,
+                benchmark::Counter::kIsIterationInvariantRate, benchmark::Counter::OneK::kIs1024);
+        if (mStartStats) {
+            auto& collector = android::media::psh_utils::PowerStatsCollector::getCollector();
+            const auto stopStats = collector.getStats();
+            android::media::psh_utils::PowerStats diff = *stopStats - *mStartStats;
+            auto cpuEnergy = diff.energyFrom("CPU");
+            auto memEnergy = diff.energyFrom("MEM");
+
+            constexpr float kMwToW = 1e-3;
+            state.counters["WCPU"] = benchmark::Counter(std::get<2>(cpuEnergy) * kMwToW,
+                                                          benchmark::Counter::kDefaults,
+                                                          benchmark::Counter::OneK::kIs1000);
+            state.counters["WMem"] = benchmark::Counter(std::get<2>(memEnergy) * kMwToW,
+                                                          benchmark::Counter::kDefaults,
+                                                          benchmark::Counter::OneK::kIs1000);
+            state.counters["JCPU"] = benchmark::Counter(
+                    std::get<1>(cpuEnergy) / N / state.iterations(), benchmark::Counter::kDefaults,
+                    benchmark::Counter::OneK::kIs1000);
+            state.counters["JMem"] = benchmark::Counter(
+                    std::get<1>(memEnergy) / N / state.iterations(), benchmark::Counter::kDefaults,
+                    benchmark::Counter::OneK::kIs1000);
+        }
+    }
+
+protected:
+    // these are only initialized upon startProfiler.
+    unsigned mCores = 0;
+    int mCore = 0;
+    CoreClass mCoreClass = CORE_LITTLE;
+    std::shared_ptr<const android::media::psh_utils::PowerStats> mStartStats;
+};
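+
+// Minimal usage sketch (illustrative only; "MyFixture" and "BM_Work" are hypothetical names):
+//
+//   class MyFixture : public android::media::psh_utils::PerformanceFixture {
+//   public:
+//       void SetUp(benchmark::State& state) override {
+//           state.SetComplexityN(1);  // items processed per iteration
+//           startProfiler(static_cast<CoreClass>(state.range(0)));
+//       }
+//   };
+//
+//   BENCHMARK_DEFINE_F(MyFixture, BM_Work)(benchmark::State& state) {
+//       for (auto _ : state) { /* workload under test */ }
+//   }
+//   BENCHMARK_REGISTER_F(MyFixture, BM_Work)->Arg(CORE_LITTLE)->Arg(CORE_BIG);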
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/PowerStats.h b/media/psh_utils/include/psh_utils/PowerStats.h
new file mode 100644
index 0000000..ae48606
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PowerStats.h
@@ -0,0 +1,108 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "HealthStats.h"
+
+#include <string>
+#include <tuple>
+#include <unordered_map>
+#include <vector>
+
+namespace android::media::psh_utils {
+
+// See powerstats_util.proto and powerstats_util.pb.h
+
+struct PowerStats {
+    struct Metadata {
+        // Represents the start time measured in milliseconds since boot of the
+        // interval or point in time when stats were gathered.
+        uint64_t start_time_since_boot_ms;
+
+        // Represents the start time measured in milliseconds since epoch of the
+        // interval or point in time when stats were gathered.
+        uint64_t start_time_epoch_ms;
+
+        // In monotonic clock.
+        uint64_t start_time_monotonic_ms;
+
+        // If PowerStats represents an interval, the duration field will be set to
+        // the millisecond duration of stats collection. It will be unset for point
+        // stats.
+        // This is in boottime.
+        uint64_t duration_ms;
+
+        // This is in monotonic time, and does not include suspend.
+        uint64_t duration_monotonic_ms;
+
+        std::string toString() const;
+
+        Metadata operator+=(const Metadata& other);
+        Metadata operator-=(const Metadata& other);
+        Metadata operator+(const Metadata& other) const;
+        Metadata operator-(const Metadata& other) const;
+        bool operator==(const Metadata& other) const = default;
+    };
+
+    struct StateResidency {
+        std::string entity_name;
+        std::string state_name;
+        uint64_t time_ms;
+        uint64_t entry_count;
+
+        std::string toString() const;
+
+        StateResidency operator+=(const StateResidency& other);
+        StateResidency operator-=(const StateResidency& other);
+        StateResidency operator+(const StateResidency& other) const;
+        StateResidency operator-(const StateResidency& other) const;
+        bool operator==(const StateResidency& other) const = default;
+    };
+
+    struct RailEnergy {
+        std::string subsystem_name;
+        std::string rail_name;
+        uint64_t energy_uws;
+
+        std::string toString() const;
+        RailEnergy operator+=(const RailEnergy& other);
+        RailEnergy operator-=(const RailEnergy& other);
+        RailEnergy operator+(const RailEnergy& other) const;
+        RailEnergy operator-(const RailEnergy& other) const;
+        bool operator==(const RailEnergy& other) const = default;
+    };
+
+    HealthStats health_stats;
+
+    std::string normalizedEnergy(const std::string& prefix = {}) const;
+
+    // Returns {seconds, joules, watts} from all rails containing a matching string.
+    std::tuple<float, float, float> energyFrom(const std::string& railMatcher) const;
+    std::string toString(const std::string& prefix = {}) const;
+
+    PowerStats operator+=(const PowerStats& other);
+    PowerStats operator-=(const PowerStats& other);
+    PowerStats operator+(const PowerStats& other) const;
+    PowerStats operator-(const PowerStats& other) const;
+    bool operator==(const PowerStats& other) const = default;
+
+    Metadata metadata{};
+    // These are sorted by name.
+    std::vector<StateResidency> power_entity_state_residency;
+    std::vector<RailEnergy> rail_energy;
+};
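+
+// Typical interval usage (illustrative sketch; "startStats"/"stopStats" are hypothetical
+// snapshots obtained from PowerStatsCollector::getStats()):
+//
+//   PowerStats diff = *stopStats - *startStats;             // stats over the interval
+//   auto [seconds, joules, watts] = diff.energyFrom("CPU"); // rails matching "CPU"
+//   std::string report = diff.normalizedEnergy("  ");       // indented human-readable dump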
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/PowerStatsCollector.h b/media/psh_utils/include/psh_utils/PowerStatsCollector.h
new file mode 100644
index 0000000..e3f8ea8
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/PowerStatsCollector.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include "PowerStats.h"
+#include <android-base/thread_annotations.h>
+#include <memory>
+#include <utils/Errors.h> // status_t
+
+namespace android::media::psh_utils {
+
+// Internal providers that fill up the PowerStats state object.
+class PowerStatsProvider {
+public:
+    virtual ~PowerStatsProvider() = default;
+    virtual status_t fill(PowerStats* stat) const = 0;
+};
+
+class PowerStatsCollector {
+public:
+    // singleton getter
+    static PowerStatsCollector& getCollector();
+
+    // Returns a snapshot of the state.
+    // If toleranceNs > 0, we permit the use of a stale snapshot taken within that tolerance.
+    std::shared_ptr<const PowerStats> getStats(int64_t toleranceNs = 0)
+            EXCLUDES(mMutex, mMutexExclusiveFill);
+
+private:
+    PowerStatsCollector();  // use the singleton getter
+
+    // Returns the cached PowerStats (non-null) if a previous snapshot exists within toleranceNs.
+    std::shared_ptr<const PowerStats> checkLastStats(int64_t toleranceNs) const EXCLUDES(mMutex);
+    int fill(PowerStats* stats) const;
+    void addProvider(std::unique_ptr<PowerStatsProvider>&& powerStatsProvider);
+
+    mutable std::mutex mMutexExclusiveFill;
+    mutable std::mutex mMutex;
+    // addProvider is called in the ctor, so effectively const.
+    std::vector<std::unique_ptr<PowerStatsProvider>> mPowerStatsProviders;
+    int64_t mLastFetchNs GUARDED_BY(mMutex) = 0;
+    std::shared_ptr<const PowerStats> mLastFetchStats GUARDED_BY(mMutex);
+};
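+
+// Example (illustrative only): callers typically allow a slightly stale snapshot to avoid
+// querying the HALs on every call.
+//
+//   auto& collector = PowerStatsCollector::getCollector();
+//   auto stats = collector.getStats(50'000'000 /* 50 ms tolerance */);
+//   ALOGD("%s", stats->toString().c_str());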
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/include/psh_utils/ServiceSingleton.h b/media/psh_utils/include/psh_utils/ServiceSingleton.h
new file mode 100644
index 0000000..d0cd6d2
--- /dev/null
+++ b/media/psh_utils/include/psh_utils/ServiceSingleton.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <android/binder_auto_utils.h>
+#include <android/binder_manager.h>
+#include <android-base/thread_annotations.h>
+#include <mutex>
+#include <utils/Log.h>
+#include <utils/Timers.h>
+
+namespace android::media::psh_utils {
+
+struct DefaultServiceTraits {
+    static constexpr int64_t kThresholdRetryNs = 1'000'000'000;
+    static constexpr int64_t kMaxRetries = 5;
+    static constexpr const char* kServiceVersion = "/default";
+    static constexpr bool kShowLog = true;
+};
+
+template<typename Service, typename ServiceTraits = DefaultServiceTraits>
+std::shared_ptr<Service> getServiceSingleton() {
+    [[clang::no_destroy]] static constinit std::mutex m;
+    [[clang::no_destroy]] static constinit std::shared_ptr<Service> service GUARDED_BY(m);
+    static int64_t nextTryNs GUARDED_BY(m) = 0;
+    static int64_t tries GUARDED_BY(m) = 0;
+
+    std::lock_guard l(m);
+    if (service
+            || tries > ServiceTraits::kMaxRetries  // tried too many times.
+            || systemTime(SYSTEM_TIME_BOOTTIME) < nextTryNs) {  // retrying too frequently.
+        return service;
+    }
+
+    const auto serviceName = std::string(Service::descriptor)
+            .append(ServiceTraits::kServiceVersion);
+    service = Service::fromBinder(
+            ::ndk::SpAIBinder(AServiceManager_checkService(serviceName.c_str())));
+
+    if (!service) {
+        // If failed, set a time limit before retry.
+        // No need to log an error, it is already done.
+        nextTryNs = systemTime(SYSTEM_TIME_BOOTTIME) + ServiceTraits::kThresholdRetryNs;
+        ALOGV_IF(ServiceTraits::kShowLog, "service:%s  retries:%lld of %lld  nextTryNs:%lld",
+                Service::descriptor, (long long)tries,
+                (long long)ServiceTraits::kMaxRetries, (long long)nextTryNs);
+        ++tries;
+    }
+
+    return service;
+}
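+
+// Example (illustrative only): retry behavior can be tuned with custom traits.
+// "QuietTraits" is a hypothetical name and "IPowerStats" stands in for any NDK AIDL interface.
+//
+//   struct QuietTraits : DefaultServiceTraits {
+//       static constexpr bool kShowLog = false;
+//       static constexpr int64_t kMaxRetries = 2;
+//   };
+//   auto service = getServiceSingleton<IPowerStats, QuietTraits>();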
+
+} // namespace android::media::psh_utils
diff --git a/media/psh_utils/tests/Android.bp b/media/psh_utils/tests/Android.bp
new file mode 100644
index 0000000..74589f8
--- /dev/null
+++ b/media/psh_utils/tests/Android.bp
@@ -0,0 +1,26 @@
+package {
+    default_team: "trendy_team_media_framework_audio",
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "frameworks_av_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    default_applicable_licenses: ["frameworks_av_license"],
+}
+
+cc_test {
+    name: "powerstats_collector_tests",
+    srcs: [
+        "powerstats_collector_tests.cpp",
+    ],
+    shared_libs: [
+        "libbase",
+        "libbinder_ndk",
+        "libcutils",
+        "liblog",
+        "libutils",
+    ],
+    static_libs: [
+        "libpshutils",
+    ],
+}
diff --git a/media/psh_utils/tests/powerstats_collector_tests.cpp b/media/psh_utils/tests/powerstats_collector_tests.cpp
new file mode 100644
index 0000000..35c264a
--- /dev/null
+++ b/media/psh_utils/tests/powerstats_collector_tests.cpp
@@ -0,0 +1,181 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <psh_utils/PowerStatsCollector.h>
+#include <gtest/gtest.h>
+#include <utils/Log.h>
+
+using namespace android::media::psh_utils;
+
+template <typename T>
+void inRange(const T& a, const T& b, const T& c) {
+    ASSERT_GE(a, std::min(b, c));
+    ASSERT_LE(a, std::max(b, c));
+}
+
+TEST(powerstat_collector_tests, basic) {
+    auto& psc = PowerStatsCollector::getCollector();
+
+    // This test is used for debugging the string through logcat; we validate that it is non-empty.
+    auto powerStats = psc.getStats();
+    ALOGD("%s: %s", __func__, powerStats->toString().c_str());
+    EXPECT_FALSE(powerStats->toString().empty());
+}
+
+TEST(powerstat_collector_tests, metadata) {
+    PowerStats ps1, ps2;
+
+    constexpr uint64_t kDurationMs1 = 5;
+    constexpr uint64_t kDurationMs2 = 10;
+    ps1.metadata.duration_ms = kDurationMs1;
+    ps2.metadata.duration_ms = kDurationMs2;
+
+    constexpr uint64_t kDurationMonotonicMs1 = 3;
+    constexpr uint64_t kDurationMonotonicMs2 = 9;
+    ps1.metadata.duration_monotonic_ms = kDurationMonotonicMs1;
+    ps2.metadata.duration_monotonic_ms = kDurationMonotonicMs2;
+
+    constexpr uint64_t kStartTimeSinceBootMs1 = 1616;
+    constexpr uint64_t kStartTimeEpochMs1 = 1121;
+    constexpr uint64_t kStartTimeMonotonicMs1 = 1525;
+    constexpr uint64_t kStartTimeSinceBootMs2 = 2616;
+    constexpr uint64_t kStartTimeEpochMs2 = 2121;
+    constexpr uint64_t kStartTimeMonotonicMs2 = 2525;
+
+    ps1.metadata.start_time_since_boot_ms = kStartTimeSinceBootMs1;
+    ps1.metadata.start_time_epoch_ms = kStartTimeEpochMs1;
+    ps1.metadata.start_time_monotonic_ms = kStartTimeMonotonicMs1;
+    ps2.metadata.start_time_since_boot_ms = kStartTimeSinceBootMs2;
+    ps2.metadata.start_time_epoch_ms = kStartTimeEpochMs2;
+    ps2.metadata.start_time_monotonic_ms = kStartTimeMonotonicMs2;
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kDurationMs1 + kDurationMs2,
+            ps3.metadata.duration_ms);
+    EXPECT_EQ(kDurationMonotonicMs1 + kDurationMonotonicMs2,
+            ps3.metadata.duration_monotonic_ms);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.metadata.start_time_since_boot_ms,
+            kStartTimeSinceBootMs1, kStartTimeSinceBootMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.metadata.start_time_epoch_ms,
+            kStartTimeEpochMs1, kStartTimeEpochMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.metadata.start_time_monotonic_ms,
+            kStartTimeMonotonicMs1, kStartTimeMonotonicMs2));
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kDurationMs2 - kDurationMs1,
+            ps5.metadata.duration_ms);
+    EXPECT_EQ(kDurationMonotonicMs2 - kDurationMonotonicMs1,
+            ps5.metadata.duration_monotonic_ms);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.metadata.start_time_since_boot_ms,
+            kStartTimeSinceBootMs1, kStartTimeSinceBootMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.metadata.start_time_epoch_ms,
+            kStartTimeEpochMs1, kStartTimeEpochMs2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.metadata.start_time_monotonic_ms,
+         kStartTimeMonotonicMs1, kStartTimeMonotonicMs2));
+}
+
+TEST(powerstat_collector_tests, state_residency) {
+    PowerStats ps1, ps2;
+
+    constexpr uint64_t kTimeMs1 = 5;
+    constexpr uint64_t kTimeMs2 = 10;
+    constexpr uint64_t kEntryCount1 = 15;
+    constexpr uint64_t kEntryCount2 = 18;
+
+    ps1.power_entity_state_residency.emplace_back(
+            PowerStats::StateResidency{"", "", kTimeMs1, kEntryCount1});
+    ps2.power_entity_state_residency.emplace_back(
+            PowerStats::StateResidency{"", "", kTimeMs2, kEntryCount2});
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kTimeMs1 + kTimeMs2,
+            ps3.power_entity_state_residency[0].time_ms);
+    EXPECT_EQ(kEntryCount1 + kEntryCount2,
+            ps3.power_entity_state_residency[0].entry_count);
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kTimeMs2 - kTimeMs1,
+            ps5.power_entity_state_residency[0].time_ms);
+    EXPECT_EQ(kEntryCount2 - kEntryCount1,
+            ps5.power_entity_state_residency[0].entry_count);
+}
+
+TEST(powerstat_collector_tests, rail_energy) {
+    PowerStats ps1, ps2;
+
+    constexpr uint64_t kEnergyUws1 = 5;
+    constexpr uint64_t kEnergyUws2 = 10;
+
+    ps1.rail_energy.emplace_back(
+            PowerStats::RailEnergy{"", "", kEnergyUws1});
+    ps2.rail_energy.emplace_back(
+            PowerStats::RailEnergy{"", "", kEnergyUws2});
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kEnergyUws1 + kEnergyUws2,
+            ps3.rail_energy[0].energy_uws);
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kEnergyUws2 - kEnergyUws1,
+            ps5.rail_energy[0].energy_uws);
+}
+
+TEST(powerstat_collector_tests, health_stats) {
+    PowerStats ps1, ps2;
+
+    constexpr double kBatteryChargeCounterUah1 = 21;
+    constexpr double kBatteryChargeCounterUah2 = 25;
+    ps1.health_stats.batteryChargeCounterUah = kBatteryChargeCounterUah1;
+    ps2.health_stats.batteryChargeCounterUah = kBatteryChargeCounterUah2;
+
+    constexpr double kBatteryFullChargeUah1 = 32;
+    constexpr double kBatteryFullChargeUah2 = 33;
+    ps1.health_stats.batteryFullChargeUah = kBatteryFullChargeUah1;
+    ps2.health_stats.batteryFullChargeUah = kBatteryFullChargeUah2;
+
+    constexpr double kBatteryVoltageMillivolts1 = 42;
+    constexpr double kBatteryVoltageMillivolts2 = 43;
+    ps1.health_stats.batteryVoltageMillivolts = kBatteryVoltageMillivolts1;
+    ps2.health_stats.batteryVoltageMillivolts = kBatteryVoltageMillivolts2;
+
+    PowerStats ps3 = ps1 + ps2;
+    PowerStats ps4 = ps2 + ps1;
+    EXPECT_EQ(ps3, ps4);
+    EXPECT_EQ(kBatteryChargeCounterUah1 + kBatteryChargeCounterUah2,
+              ps3.health_stats.batteryChargeCounterUah);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.health_stats.batteryFullChargeUah,
+             kBatteryFullChargeUah1, kBatteryFullChargeUah2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps3.health_stats.batteryVoltageMillivolts,
+             kBatteryVoltageMillivolts1, kBatteryVoltageMillivolts2));
+
+    PowerStats ps5 = ps2 - ps1;
+    EXPECT_EQ(kBatteryChargeCounterUah2 - kBatteryChargeCounterUah1,
+              ps5.health_stats.batteryChargeCounterUah);
+
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.health_stats.batteryFullChargeUah,
+            kBatteryFullChargeUah1, kBatteryFullChargeUah2));
+    EXPECT_NO_FATAL_FAILURE(inRange(ps5.health_stats.batteryVoltageMillivolts,
+            kBatteryVoltageMillivolts1, kBatteryVoltageMillivolts2));
+}
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 7bec8cf..ffcde42 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -286,7 +286,7 @@
 bool modifyAudioRoutingAllowed(const AttributionSourceState& attributionSource) {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
-    if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
+    if (isAudioServerUid(uid)) return true;
     // IMPORTANT: Use PermissionCache - not a runtime permission and may not change.
     bool ok = PermissionCache::checkPermission(sModifyAudioRouting, pid, uid);
     if (!ok) ALOGE("%s(): android.permission.MODIFY_AUDIO_ROUTING denied for uid %d",
@@ -301,7 +301,7 @@
 bool modifyDefaultAudioEffectsAllowed(const AttributionSourceState& attributionSource) {
     uid_t uid = VALUE_OR_FATAL(aidl2legacy_int32_t_uid_t(attributionSource.uid));
     pid_t pid = VALUE_OR_FATAL(aidl2legacy_int32_t_pid_t(attributionSource.pid));
-    if (isAudioServerUid(IPCThreadState::self()->getCallingUid())) return true;
+    if (isAudioServerUid(uid)) return true;
 
     static const String16 sModifyDefaultAudioEffectsAllowed(
             "android.permission.MODIFY_DEFAULT_AUDIO_EFFECTS");
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index 3ff62a8..77b342a 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -39,13 +39,17 @@
 #include <binder/IServiceManager.h>
 #include <binder/Parcel.h>
 #include <cutils/properties.h>
+#include <com_android_media_audio.h>
 #include <com_android_media_audioserver.h>
 #include <media/AidlConversion.h>
 #include <media/AudioParameter.h>
 #include <media/AudioValidator.h>
 #include <media/IMediaLogService.h>
+#include <media/IPermissionProvider.h>
 #include <media/MediaMetricsItem.h>
+#include <media/NativePermissionController.h>
 #include <media/TypeConverter.h>
+#include <media/ValidatedAttributionSourceState.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/MemoryLeakTrackUtil.h>
 #include <mediautils/MethodStatistics.h>
@@ -81,12 +85,17 @@
 namespace android {
 
 using ::android::base::StringPrintf;
+using aidl_utils::statusTFromBinderStatus;
 using media::IEffectClient;
 using media::audio::common::AudioMMapPolicyInfo;
 using media::audio::common::AudioMMapPolicyType;
 using media::audio::common::AudioMode;
 using android::content::AttributionSourceState;
 using android::detail::AudioHalVersionInfo;
+using com::android::media::permission::INativePermissionController;
+using com::android::media::permission::IPermissionProvider;
+using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::ValidatedAttributionSourceState;
 
 static const AudioHalVersionInfo kMaxAAudioPropertyDeviceHalVersion =
         AudioHalVersionInfo(AudioHalVersionInfo::Type::HIDL, 7, 1);
@@ -118,6 +127,52 @@
     }
 }
 
+static error::BinderResult<ValidatedAttributionSourceState>
+validateAttributionFromContextOrTrustedCaller(AttributionSourceState attr,
+        const IPermissionProvider& provider) {
+    const auto callingUid = IPCThreadState::self()->getCallingUid();
+    // We trust the following UIDs to pass appropriately validated identities down to us
+    if (isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+        // Legacy paths may not properly populate package name, so we attempt to handle.
+        if (!attr.packageName.has_value() || attr.packageName.value() == "") {
+            ALOGW("Trusted client %d provided attr with missing package name" , callingUid);
+            attr.packageName = VALUE_OR_RETURN(provider.getPackagesForUid(callingUid))[0];
+        }
+        // Behavior change: In the case of delegation, if pid is invalid,
+        // filling it in with the callingPid will cause a mismatch between the
+        // pid and the uid in the attribution, which is error-prone.
+        // Instead, assert that the pid from a trusted source is valid
+        if (attr.pid == -1) {
+            if (callingUid != static_cast<uid_t>(attr.uid)) {
+                return error::unexpectedExceptionCode(binder::Status::EX_ILLEGAL_ARGUMENT,
+                        "validateAttribution: Invalid pid from delegating trusted source");
+            } else {
+                // Legacy handling for trusted clients which may not fill pid correctly
+                attr.pid = IPCThreadState::self()->getCallingPid();
+            }
+        }
+        return ValidatedAttributionSourceState::createFromTrustedSource(std::move(attr));
+    } else {
+        // Behavior change: Populate pid with callingPid unconditionally. Previously, we
+        // allowed caller provided pid, if uid matched calling context, but this is error-prone
+        // since it allows mismatching uid/pid
+        return ValidatedAttributionSourceState::createFromBinderContext(std::move(attr), provider);
+    }
+}
+
+#define VALUE_OR_RETURN_CONVERTED(exp)                                                \
+    ({                                                                                \
+        auto _tmp = (exp);                                                            \
+        if (!_tmp.ok()) {                                                             \
+            ALOGE("Function: %s Line: %d Failed result (%s)", __FUNCTION__, __LINE__, \
+                  errorToString(_tmp.error()).c_str());                               \
+            return statusTFromBinderStatus(_tmp.error());                             \
+        }                                                                             \
+        std::move(_tmp.value());                                                      \
+    })
+
+
+
 // Creates association between Binder code to name for IAudioFlinger.
 #define IAUDIOFLINGER_BINDER_METHOD_MACRO_LIST \
 BINDER_METHOD_ENTRY(createTrack) \
@@ -132,8 +187,6 @@
 BINDER_METHOD_ENTRY(masterMute) \
 BINDER_METHOD_ENTRY(setStreamVolume) \
 BINDER_METHOD_ENTRY(setStreamMute) \
-BINDER_METHOD_ENTRY(streamVolume) \
-BINDER_METHOD_ENTRY(streamMute) \
 BINDER_METHOD_ENTRY(setMode) \
 BINDER_METHOD_ENTRY(setMicMute) \
 BINDER_METHOD_ENTRY(getMicMute) \
@@ -519,30 +572,42 @@
     audio_attributes_t localAttr = *attr;
 
     // TODO b/182392553: refactor or make clearer
-    pid_t clientPid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
-    bool updatePid = (clientPid == (pid_t)-1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    AttributionSourceState adjAttributionSource;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        pid_t clientPid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+        bool updatePid = (clientPid == (pid_t)-1);
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
 
-    AttributionSourceState adjAttributionSource = client.attributionSource;
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        uid_t clientUid =
-            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
-        ALOGW_IF(clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        updatePid = true;
-    }
-    if (updatePid) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
+        adjAttributionSource = client.attributionSource;
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            uid_t clientUid =
+                VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+            ALOGW_IF(clientUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, clientUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            updatePid = true;
+        }
+        if (updatePid) {
+            const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, clientPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
             adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(client.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+    }
 
     if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
         audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
@@ -683,11 +748,9 @@
 
     result.append("Notification Clients:\n");
     result.append("   pid    uid  name\n");
-    for (size_t i = 0; i < mNotificationClients.size(); ++i) {
-        const pid_t pid = mNotificationClients[i]->getPid();
-        const uid_t uid = mNotificationClients[i]->getUid();
-        const mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
-        result.appendFormat("%6d %6u  %s\n", pid, uid, info.package.c_str());
+    for (const auto& [ _, client ] : mNotificationClients) {
+        result.appendFormat("%6d %6u  %s\n",
+                client->getPid(), client->getUid(), client->getPackageName().c_str());
     }
 
     result.append("Global session refs:\n");
@@ -997,36 +1060,50 @@
     bool isSpatialized = false;
     bool isBitPerfect = false;
 
-    // TODO b/182392553: refactor or make clearer
-    pid_t clientPid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(input.clientInfo.attributionSource.pid));
-    bool updatePid = (clientPid == (pid_t)-1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    uid_t clientUid =
-        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(input.clientInfo.attributionSource.uid));
     audio_io_handle_t effectThreadId = AUDIO_IO_HANDLE_NONE;
     std::vector<int> effectIds;
     audio_attributes_t localAttr = input.attr;
 
-    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        ALOGW_IF(clientUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, clientUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        clientUid = callingUid;
-        updatePid = true;
+    AttributionSourceState adjAttributionSource;
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = input.clientInfo.attributionSource;
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        uid_t clientUid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(
+                        input.clientInfo.attributionSource.uid));
+        pid_t clientPid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+                        input.clientInfo.attributionSource.pid));
+        bool updatePid = (clientPid == (pid_t)-1);
+
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            ALOGW_IF(clientUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, clientUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            clientUid = callingUid;
+            updatePid = true;
+        }
+        if (updatePid) {
+            ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, clientPid);
+            clientPid = callingPid;
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                adjAttributionSource);
+
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(input.clientInfo.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
     }
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    if (updatePid) {
-        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, clientPid);
-        clientPid = callingPid;
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
-            adjAttributionSource);
 
     audio_session_t sessionId = input.sessionId;
     if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -1079,7 +1156,7 @@
             goto Exit;
         }
 
-        client = registerPid(clientPid);
+        client = registerPid(adjAttributionSource.pid);
 
         IAfPlaybackThread* effectThread = nullptr;
         sp<IAfEffectChain> effectChain = nullptr;
@@ -1667,37 +1744,6 @@
     return NO_ERROR;
 }
 
-float AudioFlinger::streamVolume(audio_stream_type_t stream, audio_io_handle_t output) const
-{
-    status_t status = checkStreamType(stream);
-    if (status != NO_ERROR) {
-        return 0.0f;
-    }
-    if (output == AUDIO_IO_HANDLE_NONE) {
-        return 0.0f;
-    }
-
-    audio_utils::lock_guard lock(mutex());
-    sp<VolumeInterface> volumeInterface = getVolumeInterface_l(output);
-    if (volumeInterface == NULL) {
-        return 0.0f;
-    }
-
-    return volumeInterface->streamVolume(stream);
-}
-
-bool AudioFlinger::streamMute(audio_stream_type_t stream) const
-{
-    status_t status = checkStreamType(stream);
-    if (status != NO_ERROR) {
-        return true;
-    }
-
-    audio_utils::lock_guard lock(mutex());
-    return streamMute_l(stream);
-}
-
-
 void AudioFlinger::broadcastParametersToRecordThreads_l(const String8& keyValuePairs)
 {
     for (size_t i = 0; i < mRecordThreads.size(); i++) {
@@ -2083,24 +2129,23 @@
 
 void AudioFlinger::registerClient(const sp<media::IAudioFlingerClient>& client)
 {
-    audio_utils::lock_guard _l(mutex());
     if (client == 0) {
         return;
     }
-    pid_t pid = IPCThreadState::self()->getCallingPid();
+    const pid_t pid = IPCThreadState::self()->getCallingPid();
     const uid_t uid = IPCThreadState::self()->getCallingUid();
+
+    audio_utils::lock_guard _l(mutex());
     {
         audio_utils::lock_guard _cl(clientMutex());
-        if (mNotificationClients.indexOfKey(pid) < 0) {
-            sp<NotificationClient> notificationClient = new NotificationClient(this,
-                                                                                client,
-                                                                                pid,
-                                                                                uid);
-            ALOGV("registerClient() client %p, pid %d, uid %u",
-                    notificationClient.get(), pid, uid);
+        if (mNotificationClients.count(pid) == 0) {
+            const mediautils::UidInfo::Info info = mUidInfo.getInfo(uid);
+            sp<NotificationClient> notificationClient = sp<NotificationClient>::make(
+                    this, client, pid, uid, info.package);
+            ALOGV("registerClient() pid %d, uid %u, package %s",
+                    pid, uid, info.package.c_str());
 
-            mNotificationClients.add(pid, notificationClient);
-
+            mNotificationClients[pid] = notificationClient;
             sp<IBinder> binder = IInterface::asBinder(client);
             binder->linkToDeath(notificationClient);
         }
@@ -2127,7 +2172,7 @@
         audio_utils::lock_guard _l(mutex());
         {
             audio_utils::lock_guard _cl(clientMutex());
-            mNotificationClients.removeItem(pid);
+            mNotificationClients.erase(pid);
         }
 
         ALOGV("%d died, releasing its sessions", pid);
@@ -2168,11 +2213,13 @@
             legacy2aidl_AudioIoDescriptor_AudioIoDescriptor(ioDesc));
 
     audio_utils::lock_guard _l(clientMutex());
-    size_t size = mNotificationClients.size();
-    for (size_t i = 0; i < size; i++) {
-        if ((pid == 0) || (mNotificationClients.keyAt(i) == pid)) {
-            mNotificationClients.valueAt(i)->audioFlingerClient()->ioConfigChanged(eventAidl,
-                                                                                   descAidl);
+    if (pid != 0) {
+        if (auto it = mNotificationClients.find(pid); it != mNotificationClients.end()) {
+            it->second->audioFlingerClient()->ioConfigChanged(eventAidl, descAidl);
+        }
+    } else {
+        for (const auto& [ client_pid, client] : mNotificationClients) {
+            client->audioFlingerClient()->ioConfigChanged(eventAidl, descAidl);
         }
     }
 }
@@ -2186,9 +2233,8 @@
 
     audio_utils::lock_guard _l(clientMutex());
     size_t size = mNotificationClients.size();
-    for (size_t i = 0; i < size; i++) {
-        mNotificationClients.valueAt(i)->audioFlingerClient()
-                ->onSupportedLatencyModesChanged(outputAidl, modesAidl);
+    for (const auto& [_, client] : mNotificationClients) {
+        client->audioFlingerClient()->onSupportedLatencyModesChanged(outputAidl, modesAidl);
     }
 }
 
@@ -2199,6 +2245,12 @@
     }
 }
 
+const IPermissionProvider& AudioFlinger::getPermissionProvider() {
+    // This is initialized as part of service construction, prior to binder registration,
+    // so it should always be non-null.
+    return mAudioPolicyServiceLocal.load()->getPermissionProvider();
+}
+
 // removeClient_l() must be called with AudioFlinger::clientMutex() held
 void AudioFlinger::removeClient_l(pid_t pid)
 {
@@ -2239,8 +2291,10 @@
 AudioFlinger::NotificationClient::NotificationClient(const sp<AudioFlinger>& audioFlinger,
                                                      const sp<media::IAudioFlingerClient>& client,
                                                      pid_t pid,
-                                                     uid_t uid)
-    : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid), mAudioFlingerClient(client)
+        uid_t uid,
+        std::string_view packageName)
+    : mAudioFlinger(audioFlinger), mPid(pid), mUid(uid)
+    , mPackageName(packageName), mAudioFlingerClient(client)
 {
 }
 
@@ -2250,7 +2304,7 @@
 
 void AudioFlinger::NotificationClient::binderDied(const wp<IBinder>& who __unused)
 {
-    sp<NotificationClient> keep(this);
+    const auto keep = sp<NotificationClient>::fromExisting(this);
     mAudioFlinger->removeNotificationClient(mPid);
 }
 
@@ -2308,30 +2362,43 @@
     output.buffers.clear();
     output.inputId = AUDIO_IO_HANDLE_NONE;
 
-    // TODO b/182392553: refactor or clean up
-    AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    bool updatePid = (adjAttributionSource.pid == -1);
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
-           adjAttributionSource.uid));
-    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        ALOGW_IF(currentUid != callingUid,
-                "%s uid %d tried to pass itself off as %d",
-                __FUNCTION__, callingUid, currentUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-        updatePid = true;
+    AttributionSourceState adjAttributionSource;
+    pid_t callingPid = IPCThreadState::self()->getCallingPid();
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = input.clientInfo.attributionSource;
+        bool updatePid = (adjAttributionSource.pid == -1);
+        const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+        const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
+               adjAttributionSource.uid));
+        if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            ALOGW_IF(currentUid != callingUid,
+                    "%s uid %d tried to pass itself off as %d",
+                    __FUNCTION__, callingUid, currentUid);
+            adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_uid_t_int32_t(callingUid));
+            updatePid = true;
+        }
+        const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
+                adjAttributionSource.pid));
+        if (updatePid) {
+            ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, currentPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(
+                    input.clientInfo.attributionSource,
+                    getPermissionProvider()
+                    ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
     }
-    const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-    const pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(
-            adjAttributionSource.pid));
-    if (updatePid) {
-        ALOGW_IF(currentPid != (pid_t)-1 && currentPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, currentPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-    }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(
-            adjAttributionSource);
+
     // further format checks are performed by createRecordTrack_l()
     if (!audio_is_valid_format(input.config.format)) {
         ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -2923,7 +2990,8 @@
                                                         audio_config_base_t *mixerConfig,
                                                         audio_devices_t deviceType,
                                                         const String8& address,
-                                                        audio_output_flags_t flags)
+                                                        audio_output_flags_t flags,
+                                                        const audio_attributes_t attributes)
 {
     AudioHwDevice *outHwDev = findSuitableHwDev_l(module, deviceType);
     if (outHwDev == NULL) {
@@ -2941,13 +3009,18 @@
 
     mHardwareStatus = AUDIO_HW_OUTPUT_OPEN;
     AudioStreamOut *outputStream = NULL;
+
+    playback_track_metadata_v7_t trackMetadata;
+    trackMetadata.base.usage = attributes.usage;
+
     status_t status = outHwDev->openOutputStream(
             &outputStream,
             *output,
             deviceType,
             flags,
             halConfig,
-            address.c_str());
+            address.c_str(),
+            {trackMetadata});
 
     mHardwareStatus = AUDIO_HW_IDLE;
 
@@ -3016,6 +3089,8 @@
             aidl2legacy_DeviceDescriptorBase(request.device));
     audio_output_flags_t flags = VALUE_OR_RETURN_STATUS(
             aidl2legacy_int32_t_audio_output_flags_t_mask(request.flags));
+    audio_attributes_t attributes = VALUE_OR_RETURN_STATUS(
+            aidl2legacy_AudioAttributes_audio_attributes_t(request.attributes));
 
     audio_io_handle_t output;
 
@@ -3038,7 +3113,7 @@
     audio_utils::lock_guard _l(mutex());
 
     const sp<IAfThreadBase> thread = openOutput_l(module, &output, &halConfig,
-            &mixerConfig, deviceType, address, flags);
+            &mixerConfig, deviceType, address, flags, attributes);
     if (thread != 0) {
         uint32_t latencyMs = 0;
         if ((flags & AUDIO_OUTPUT_FLAG_MMAP_NOIRQ) == 0) {
@@ -3496,7 +3571,7 @@
         // is likely proxied by mediaserver (e.g CameraService) and releaseAudioSessionId() can be
         // called from a different pid leaving a stale session reference.  Also we don't know how
         // to clear this reference if the client process dies.
-        if (mNotificationClients.indexOfKey(caller) < 0) {
+        if (mNotificationClients.count(caller) == 0) {
             ALOGW("acquireAudioSessionId() unknown client %d for session %d", caller, audioSession);
             return;
         }
@@ -3940,8 +4015,12 @@
         // TODO: We could check compatibility of the secondaryThread with the PatchTrack
         // for fast usage: thread has fast mixer, sample rate matches, etc.;
         // for now, we exclude fast tracks by removing the Fast flag.
+        constexpr audio_output_flags_t kIncompatiblePatchTrackFlags =
+                static_cast<audio_output_flags_t>(AUDIO_OUTPUT_FLAG_FAST
+                        | AUDIO_OUTPUT_FLAG_DIRECT | AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD);
+
         const audio_output_flags_t outputFlags =
-                (audio_output_flags_t)(track->getOutputFlags() & ~AUDIO_OUTPUT_FLAG_FAST);
+                (audio_output_flags_t)(track->getOutputFlags() & ~kIncompatiblePatchTrackFlags);
         sp<IAfPatchTrack> patchTrack = IAfPatchTrack::create(secondaryThread,
                                                        track->streamType(),
                                                        track->sampleRate(),
@@ -4121,20 +4200,31 @@
     int idOut = -1;
 
     status_t lStatus = NO_ERROR;
-
-    // TODO b/182392553: refactor or make clearer
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
-    pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
-    if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
-        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
-        ALOGW_IF(currentPid != -1 && currentPid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, callingUid, callingPid, currentPid);
-        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
-        currentPid = callingPid;
+    uid_t callingUid = IPCThreadState::self()->getCallingUid();
+    pid_t currentPid;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+        currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
+        if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+            const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+            ALOGW_IF(currentPid != -1 && currentPid != callingPid,
+                     "%s uid %d pid %d tried to pass itself off as pid %d",
+                     __func__, callingUid, callingPid, currentPid);
+            adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(
+                    legacy2aidl_pid_t_int32_t(callingPid));
+            currentPid = callingPid;
+        }
+        adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+    } else {
+        auto validatedAttrSource = VALUE_OR_RETURN_CONVERTED(
+                validateAttributionFromContextOrTrustedCaller(request.attributionSource,
+                getPermissionProvider()
+                ));
+        // TODO pass wrapped object around
+        adjAttributionSource = std::move(validatedAttrSource).unwrapInto();
+        currentPid = adjAttributionSource.pid;
     }
-    adjAttributionSource = afutils::checkAttributionSourcePackage(adjAttributionSource);
+
 
     ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
           adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
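The four call sites touched above (createTrack, createRecord, the MMAP stream path and createEffect) now share the same flag-gated pattern: when com::android::media::audio::audioserver_permissions() is enabled, the caller-supplied AttributionSourceState is validated centrally through the permission provider; otherwise the legacy uid/pid spoof-check plus afutils::checkAttributionSourcePackage() path runs. The following is only a minimal sketch of that pattern, not code from the patch: resolveAttribution() and its status_t signature are hypothetical, includes are omitted, and uid/pid conversions and error plumbing are simplified; only the flag, the validate call, the package check and the provider come from the change itself.

    static status_t resolveAttribution(const AttributionSourceState& fromClient,
            const com::android::media::permission::IPermissionProvider& provider,
            AttributionSourceState* out) {
        if (!com::android::media::audio::audioserver_permissions()) {
            // Legacy path: replace the spoofable uid/pid with the binder caller's identity
            // (unless the caller is a trusted system uid), then verify the declared package.
            AttributionSourceState adjusted = fromClient;
            const uid_t callingUid = IPCThreadState::self()->getCallingUid();
            if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
                adjusted.uid = static_cast<int32_t>(callingUid);
                adjusted.pid = static_cast<int32_t>(IPCThreadState::self()->getCallingPid());
            }
            *out = afutils::checkAttributionSourcePackage(adjusted);
            return NO_ERROR;
        }
        // Flagged path: validate once against the provider owned by AudioPolicyService
        // (exposed through AudioFlinger::getPermissionProvider() above).
        auto validated = validateAttributionFromContextOrTrustedCaller(fromClient, provider);
        if (!validated.has_value()) {
            return PERMISSION_DENIED;  // the real code propagates the binder error instead
        }
        *out = std::move(validated.value()).unwrapInto();
        return NO_ERROR;
    }
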
diff --git a/services/audioflinger/AudioFlinger.h b/services/audioflinger/AudioFlinger.h
index dcad0e2..b57a355 100644
--- a/services/audioflinger/AudioFlinger.h
+++ b/services/audioflinger/AudioFlinger.h
@@ -96,10 +96,6 @@
     status_t setStreamMute(audio_stream_type_t stream, bool muted) final
             EXCLUDES_AudioFlinger_Mutex;
 
-    float streamVolume(audio_stream_type_t stream,
-            audio_io_handle_t output) const final EXCLUDES_AudioFlinger_Mutex;
-    bool streamMute(audio_stream_type_t stream) const final EXCLUDES_AudioFlinger_Mutex;
-
     status_t setMode(audio_mode_t mode) final EXCLUDES_AudioFlinger_Mutex;
 
     status_t setMicMute(bool state) final EXCLUDES_AudioFlinger_Mutex;
@@ -337,7 +333,8 @@
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) final REQUIRES(mutex());
+            audio_output_flags_t flags,
+            audio_attributes_t attributes) final REQUIRES(mutex());
     const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
             getAudioHwDevs_l() const final REQUIRES(mutex(), hardwareMutex()) {
               return mAudioHwDevs;
@@ -409,6 +406,8 @@
     void onHardError(std::set<audio_port_handle_t>& trackPortIds) final
             EXCLUDES_AudioFlinger_ClientMutex;
 
+    const ::com::android::media::permission::IPermissionProvider& getPermissionProvider() final;
+
     // ---- end of IAfThreadCallback interface
 
     /* List available audio ports and their attributes */
@@ -480,12 +479,14 @@
                             NotificationClient(const sp<AudioFlinger>& audioFlinger,
                                                 const sp<media::IAudioFlingerClient>& client,
                                                 pid_t pid,
-                                                uid_t uid);
+                uid_t uid,
+                std::string_view packageName);
         virtual             ~NotificationClient();
 
                 sp<media::IAudioFlingerClient> audioFlingerClient() const { return mAudioFlingerClient; }
                 pid_t getPid() const { return mPid; }
                 uid_t getUid() const { return mUid; }
+                const std::string& getPackageName() const { return mPackageName; }
 
                 // IBinder::DeathRecipient
                 virtual     void        binderDied(const wp<IBinder>& who);
@@ -496,6 +497,7 @@
         const sp<AudioFlinger>  mAudioFlinger;
         const pid_t             mPid;
         const uid_t             mUid;
+        const std::string mPackageName;
         const sp<media::IAudioFlingerClient> mAudioFlingerClient;
     };
 
@@ -695,8 +697,7 @@
 
     DefaultKeyedVector<audio_io_handle_t, sp<IAfRecordThread>> mRecordThreads GUARDED_BY(mutex());
 
-    DefaultKeyedVector<pid_t, sp<NotificationClient>> mNotificationClients
-            GUARDED_BY(clientMutex());
+    std::map<pid_t, sp<NotificationClient>> mNotificationClients GUARDED_BY(clientMutex());
 
                 // updated by atomic_fetch_add_explicit
     volatile atomic_uint_fast32_t mNextUniqueIds[AUDIO_UNIQUE_ID_USE_MAX];  // ctor init
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index cfd0a5e..6273570 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -39,6 +39,7 @@
 #include <mediautils/MethodStatistics.h>
 #include <mediautils/ServiceUtilities.h>
 #include <mediautils/TimeCheck.h>
+#include <system/audio_effects/audio_effects_utils.h>
 #include <system/audio_effects/effect_aec.h>
 #include <system/audio_effects/effect_downmix.h>
 #include <system/audio_effects/effect_dynamicsprocessing.h>
@@ -70,6 +71,7 @@
 namespace android {
 
 using aidl_utils::statusTFromBinderStatus;
+using android::effect::utils::EffectParamWriter;
 using audioflinger::EffectConfiguration;
 using binder::Status;
 
@@ -1585,16 +1587,27 @@
         return INVALID_OPERATION;
     }
 
-    std::vector<uint8_t> request(sizeof(effect_param_t) + 3 * sizeof(uint32_t) + sizeof(float));
-    effect_param_t *param = (effect_param_t*) request.data();
-    param->psize = sizeof(int32_t);
-    param->vsize = sizeof(int32_t) * 2 + sizeof(float);
-    *(int32_t*)param->data = HG_PARAM_HAPTIC_INTENSITY;
-    int32_t* hapticScalePtr = reinterpret_cast<int32_t*>(param->data + sizeof(int32_t));
-    hapticScalePtr[0] = id;
-    hapticScalePtr[1] = static_cast<int32_t>(hapticScale.getLevel());
-    float* adaptiveScaleFactorPtr = reinterpret_cast<float*>(param->data + 3 * sizeof(int32_t));
-    *adaptiveScaleFactorPtr = hapticScale.getAdaptiveScaleFactor();
+    // Scale param fields
+    int32_t intensityParam = static_cast<int32_t>(HG_PARAM_HAPTIC_INTENSITY);
+    int32_t scaleLevel = static_cast<int32_t>(hapticScale.getLevel());
+    float scaleFactor = hapticScale.getScaleFactor();
+    float adaptiveScaleFactor = hapticScale.getAdaptiveScaleFactor();
+
+    size_t psize = sizeof(int32_t); // HG_PARAM_HAPTIC_INTENSITY
+    size_t vsize = 2 * sizeof(int32_t) + 2 * sizeof(float); // id + scale fields
+    std::vector<uint8_t> request(sizeof(effect_param_t) + psize + vsize);
+    effect_param_t *effectParam = (effect_param_t*) request.data();
+    effectParam->psize = psize;
+    effectParam->vsize = vsize;
+
+    EffectParamWriter writer(*effectParam);
+    writer.writeToParameter(&intensityParam);
+    writer.writeToValue(&id);
+    writer.writeToValue(&scaleLevel);
+    writer.writeToValue(&scaleFactor);
+    writer.writeToValue(&adaptiveScaleFactor);
+    writer.finishValueWrite();
+
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1613,17 +1626,21 @@
         return INVALID_OPERATION;
     }
 
-    const size_t paramCount = 3;
-    std::vector<uint8_t> request(
-            sizeof(effect_param_t) + sizeof(int32_t) + paramCount * sizeof(float));
-    effect_param_t *param = (effect_param_t*) request.data();
-    param->psize = sizeof(int32_t);
-    param->vsize = paramCount * sizeof(float);
-    *(int32_t*)param->data = HG_PARAM_VIBRATOR_INFO;
-    float* vibratorInfoPtr = reinterpret_cast<float*>(param->data + sizeof(int32_t));
-    vibratorInfoPtr[0] = vibratorInfo.resonantFrequency;
-    vibratorInfoPtr[1] = vibratorInfo.qFactor;
-    vibratorInfoPtr[2] = vibratorInfo.maxAmplitude;
+    size_t psize = sizeof(int32_t); // HG_PARAM_VIBRATOR_INFO
+    size_t vsize = 3 * sizeof(float); // resonantFrequency + qFactor + maxAmplitude
+    std::vector<uint8_t> request(sizeof(effect_param_t) + psize + vsize);
+    effect_param_t *effectParam = (effect_param_t*) request.data();
+    effectParam->psize = psize;
+    effectParam->vsize = vsize;
+
+    int32_t infoParam = static_cast<int32_t>(HG_PARAM_VIBRATOR_INFO);
+    EffectParamWriter writer(*effectParam);
+    writer.writeToParameter(&infoParam);
+    writer.writeToValue(&vibratorInfo.resonantFrequency);
+    writer.writeToValue(&vibratorInfo.qFactor);
+    writer.writeToValue(&vibratorInfo.maxAmplitude);
+    writer.finishValueWrite();
+
     std::vector<uint8_t> response;
     status_t status = command(EFFECT_CMD_SET_PARAM, request, sizeof(int32_t), &response);
     if (status == NO_ERROR) {
@@ -1773,7 +1790,7 @@
                                          int32_t priority, bool notifyFramesProcessed,
                                          bool isInternal,
                                          audio_utils::MutexOrder mutexOrder)
-    : BnEffect(), mMutex(audio_utils::mutex{mutexOrder}),
+    : BnEffect(), mMutex(mutexOrder),
     mEffect(effect), mEffectClient(media::EffectClientAsyncProxy::makeIfNeeded(effectClient)),
     mClient(client), mCblk(nullptr),
     mPriority(priority), mHasControl(false), mEnabled(false), mDisconnected(false),
@@ -3557,7 +3574,7 @@
                 mHalEffect->setDevices({mDevice});
             }
         }
-        *handle = new InternalEffectHandle(mHalEffect, mNotifyFramesProcessed);
+        *handle = sp<InternalEffectHandle>::make(mHalEffect, mNotifyFramesProcessed);
         status = (*handle)->initCheck();
         if (status == OK) {
             status = mHalEffect->addHandle((*handle).get());
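For reference, the request that the EffectParamWriter calls above assemble for HG_PARAM_HAPTIC_INTENSITY follows the standard effect_param_t layout ({ status, psize, vsize, data[] }): the parameter area carries the selector, and the value area carries the track id plus the three scale fields. The hand-rolled sketch below is equivalent to what the removed pointer arithmetic built, extended with the new scale factor field; buildHapticIntensityParam() is a hypothetical helper used only for illustration and includes are omitted.

    static std::vector<uint8_t> buildHapticIntensityParam(int32_t trackId, int32_t scaleLevel,
            float scaleFactor, float adaptiveScaleFactor) {
        const size_t psize = sizeof(int32_t);                          // the parameter selector
        const size_t vsize = 2 * sizeof(int32_t) + 2 * sizeof(float);  // id + three scale fields
        std::vector<uint8_t> buf(sizeof(effect_param_t) + psize + vsize);
        auto* p = reinterpret_cast<effect_param_t*>(buf.data());
        p->psize = psize;
        p->vsize = vsize;
        const int32_t selector = HG_PARAM_HAPTIC_INTENSITY;
        uint8_t* cursor = reinterpret_cast<uint8_t*>(p->data);
        memcpy(cursor, &selector, sizeof(selector));
        cursor += sizeof(selector);  // psize is already a multiple of 4, so no padding needed
        memcpy(cursor, &trackId, sizeof(trackId));           cursor += sizeof(trackId);
        memcpy(cursor, &scaleLevel, sizeof(scaleLevel));     cursor += sizeof(scaleLevel);
        memcpy(cursor, &scaleFactor, sizeof(scaleFactor));   cursor += sizeof(scaleFactor);
        memcpy(cursor, &adaptiveScaleFactor, sizeof(adaptiveScaleFactor));
        return buf;
    }
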
diff --git a/services/audioflinger/IAfPatchPanel.h b/services/audioflinger/IAfPatchPanel.h
index 6110e4c..37dce3a 100644
--- a/services/audioflinger/IAfPatchPanel.h
+++ b/services/audioflinger/IAfPatchPanel.h
@@ -82,7 +82,8 @@
             audio_config_base_t* mixerConfig,
             audio_devices_t deviceType,
             const String8& address,
-            audio_output_flags_t flags) REQUIRES(mutex()) = 0;
+            audio_output_flags_t flags,
+            audio_attributes_t attributes) REQUIRES(mutex()) = 0;
     virtual audio_utils::mutex& mutex() const
             RETURN_CAPABILITY(audio_utils::AudioFlinger_Mutex) = 0;
     virtual const DefaultKeyedVector<audio_module_handle_t, AudioHwDevice*>&
diff --git a/services/audioflinger/IAfThread.h b/services/audioflinger/IAfThread.h
index a7da658..4d26aa0 100644
--- a/services/audioflinger/IAfThread.h
+++ b/services/audioflinger/IAfThread.h
@@ -37,6 +37,10 @@
 
 #include <optional>
 
+namespace com::android::media::permission {
+    class IPermissionProvider;
+}
+
 namespace android {
 
 class IAfDirectOutputThread;
@@ -122,6 +126,9 @@
             EXCLUDES_AudioFlinger_ClientMutex = 0;
 
     virtual void onHardError(std::set<audio_port_handle_t>& trackPortIds) = 0;
+
+    virtual const ::com::android::media::permission::IPermissionProvider&
+            getPermissionProvider() = 0;
 };
 
 class IAfThreadBase : public virtual RefBase {
diff --git a/services/audioflinger/PatchPanel.cpp b/services/audioflinger/PatchPanel.cpp
index f57470f..35f17c1 100644
--- a/services/audioflinger/PatchPanel.cpp
+++ b/services/audioflinger/PatchPanel.cpp
@@ -260,6 +260,7 @@
                     if (patch->sinks[0].config_mask & AUDIO_PORT_CONFIG_FLAGS) {
                         flags = patch->sinks[0].flags.output;
                     }
+                    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
                     const sp<IAfThreadBase> thread = mAfPatchPanelCallback->openOutput_l(
                                                             patch->sinks[0].ext.device.hw_module,
                                                             &output,
@@ -267,7 +268,8 @@
                                                             &mixerConfig,
                                                             outputDevice,
                                                             outputDeviceAddress,
-                                                            flags);
+                                                            flags,
+                                                            attributes);
                     ALOGV("mAfPatchPanelCallback->openOutput_l() returned %p", thread.get());
                     if (thread == 0) {
                         status = NO_MEMORY;
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index ecbd0ae..583552a 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -72,6 +72,7 @@
 #include <media/nbaio/Pipe.h>
 #include <media/nbaio/PipeReader.h>
 #include <media/nbaio/SourceAudioBufferProvider.h>
+#include <media/ValidatedAttributionSourceState.h>
 #include <mediautils/BatteryNotifier.h>
 #include <mediautils/Process.h>
 #include <mediautils/SchedulingPolicyService.h>
@@ -120,6 +121,8 @@
     return a < b ? a : b;
 }
 
+using com::android::media::permission::ValidatedAttributionSourceState;
+
 namespace android {
 
 using audioflinger::SyncEvent;
@@ -162,6 +165,9 @@
 
 // maximum time to wait in sendConfigEvent_l() for a status to be received
 static const nsecs_t kConfigEventTimeoutNs = seconds(2);
+// longer timeout for create audio patch to account for specific scenarios
+// with Bluetooth devices
+static const nsecs_t kCreatePatchEventTimeoutNs = seconds(4);
 
 // minimum sleep time for the mixer thread loop when tracks are active but in underrun
 static const uint32_t kMinThreadSleepTimeUs = 5000;
@@ -728,9 +734,11 @@
     mutex().unlock();
     {
         audio_utils::unique_lock _l(event->mutex());
+        nsecs_t timeoutNs = event->mType == CFG_EVENT_CREATE_AUDIO_PATCH ?
+              kCreatePatchEventTimeoutNs : kConfigEventTimeoutNs;
         while (event->mWaitStatus) {
             if (event->mCondition.wait_for(
-                    _l, std::chrono::nanoseconds(kConfigEventTimeoutNs), getTid())
+                    _l, std::chrono::nanoseconds(timeoutNs), getTid())
                             == std::cv_status::timeout) {
                 event->mStatus = TIMED_OUT;
                 event->mWaitStatus = false;
@@ -5201,7 +5209,7 @@
                                                     // audio to FastMixer
         fastTrack->mFormat = mFormat; // mPipeSink format for audio to FastMixer
         fastTrack->mHapticPlaybackEnabled = mHapticChannelMask != AUDIO_CHANNEL_NONE;
-        fastTrack->mHapticScale = {/*level=*/os::HapticLevel::NONE };
+        fastTrack->mHapticScale = os::HapticScale::none();
         fastTrack->mHapticMaxAmplitude = NAN;
         fastTrack->mGeneration++;
         state->mFastTracksGen++;
@@ -7143,7 +7151,7 @@
 uint32_t DirectOutputThread::activeSleepTimeUs() const
 {
     uint32_t time;
-    if (audio_has_proportional_frames(mFormat)) {
+    if (audio_has_proportional_frames(mFormat) && mType != OFFLOAD) {
         time = PlaybackThread::activeSleepTimeUs();
     } else {
         time = kDirectMinSleepTimeUs;
@@ -7154,7 +7162,7 @@
 uint32_t DirectOutputThread::idleSleepTimeUs() const
 {
     uint32_t time;
-    if (audio_has_proportional_frames(mFormat)) {
+    if (audio_has_proportional_frames(mFormat) && mType != OFFLOAD) {
         time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000) / 2;
     } else {
         time = kDirectMinSleepTimeUs;
@@ -7165,7 +7173,7 @@
 uint32_t DirectOutputThread::suspendSleepTimeUs() const
 {
     uint32_t time;
-    if (audio_has_proportional_frames(mFormat)) {
+    if (audio_has_proportional_frames(mFormat) && mType != OFFLOAD) {
         time = (uint32_t)(((mFrameCount * 1000) / mSampleRate) * 1000);
     } else {
         time = kDirectMinSleepTimeUs;
@@ -7182,7 +7190,7 @@
     // no delay on outputs with HW A/V sync
     if (usesHwAvSync()) {
         mStandbyDelayNs = 0;
-    } else if ((mType == OFFLOAD) && !audio_has_proportional_frames(mFormat)) {
+    } else if (mType == OFFLOAD) {
         mStandbyDelayNs = kOffloadStandbyDelayNs;
     } else {
         mStandbyDelayNs = microseconds(mActiveSleepTimeUs*2);
@@ -10302,8 +10310,23 @@
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
 
     audio_io_handle_t io = mId;
-    const AttributionSourceState adjAttributionSource = afutils::checkAttributionSourcePackage(
-            client.attributionSource);
+    AttributionSourceState adjAttributionSource;
+    if (!com::android::media::audio::audioserver_permissions()) {
+        adjAttributionSource = afutils::checkAttributionSourcePackage(
+                client.attributionSource);
+    } else {
+        // TODO(b/342475009) validate in oboeservice, and plumb downwards
+        auto validatedRes = ValidatedAttributionSourceState::createFromTrustedUidNoPackage(
+                    client.attributionSource,
+                    mAfThreadCallback->getPermissionProvider()
+                );
+        if (!validatedRes.has_value()) {
+            ALOGE("MMAP client package validation fail: %s",
+                    validatedRes.error().toString8().c_str());
+            return aidl_utils::statusTFromBinderStatus(validatedRes.error());
+        }
+        adjAttributionSource = std::move(validatedRes.value()).unwrapInto();
+    }
 
     const auto localSessionId = mSessionId;
     auto localAttr = mAttr;
diff --git a/services/audioflinger/datapath/AudioHwDevice.cpp b/services/audioflinger/datapath/AudioHwDevice.cpp
index 95e9ecc..5314e9e 100644
--- a/services/audioflinger/datapath/AudioHwDevice.cpp
+++ b/services/audioflinger/datapath/AudioHwDevice.cpp
@@ -43,7 +43,8 @@
         audio_devices_t deviceType,
         audio_output_flags_t flags,
         struct audio_config *config,
-        const char *address)
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
 
     struct audio_config originalConfig = *config;
@@ -52,7 +53,7 @@
     // Try to open the HAL first using the current format.
     ALOGV("openOutputStream(), try sampleRate %d, format %#x, channelMask %#x", config->sample_rate,
             config->format, config->channel_mask);
-    status_t status = outputStream->open(handle, deviceType, config, address);
+    status_t status = outputStream->open(handle, deviceType, config, address, sourceMetadata);
 
     if (status != NO_ERROR) {
         delete outputStream;
@@ -72,7 +73,8 @@
         if (wrapperNeeded) {
             if (SPDIFEncoder::isFormatSupported(originalConfig.format)) {
                 outputStream = new SpdifStreamOut(this, flags, originalConfig.format);
-                status = outputStream->open(handle, deviceType, &originalConfig, address);
+                status = outputStream->open(handle, deviceType, &originalConfig, address,
+                                            sourceMetadata);
                 if (status != NO_ERROR) {
                     ALOGE("ERROR - openOutputStream(), SPDIF open returned %d",
                         status);
diff --git a/services/audioflinger/datapath/AudioHwDevice.h b/services/audioflinger/datapath/AudioHwDevice.h
index 80c1473..e1a9018 100644
--- a/services/audioflinger/datapath/AudioHwDevice.h
+++ b/services/audioflinger/datapath/AudioHwDevice.h
@@ -87,7 +87,8 @@
             audio_devices_t deviceType,
             audio_output_flags_t flags,
             struct audio_config *config,
-            const char *address);
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
 
     status_t openInputStream(
             AudioStreamIn **ppStreamIn,
diff --git a/services/audioflinger/datapath/AudioStreamOut.cpp b/services/audioflinger/datapath/AudioStreamOut.cpp
index a686ff6..c65373e 100644
--- a/services/audioflinger/datapath/AudioStreamOut.cpp
+++ b/services/audioflinger/datapath/AudioStreamOut.cpp
@@ -93,7 +93,8 @@
         audio_io_handle_t handle,
         audio_devices_t deviceType,
         struct audio_config *config,
-        const char *address)
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
     sp<StreamOutHalInterface> outStream;
 
@@ -107,7 +108,8 @@
             customFlags,
             config,
             address,
-            &outStream);
+            &outStream,
+            sourceMetadata);
     ALOGV("AudioStreamOut::open(), HAL returned stream %p, sampleRate %d, format %#x,"
             " channelMask %#x, status %d", outStream.get(), config->sample_rate, config->format,
             config->channel_mask, status);
@@ -124,7 +126,8 @@
                 customFlags,
                 &customConfig,
                 address,
-                &outStream);
+                &outStream,
+                sourceMetadata);
         ALOGV("AudioStreamOut::open(), treat IEC61937 as PCM, status = %d", status);
     }
 
diff --git a/services/audioflinger/datapath/AudioStreamOut.h b/services/audioflinger/datapath/AudioStreamOut.h
index 2c9fb3e..2bf94a1 100644
--- a/services/audioflinger/datapath/AudioStreamOut.h
+++ b/services/audioflinger/datapath/AudioStreamOut.h
@@ -47,7 +47,8 @@
             audio_io_handle_t handle,
             audio_devices_t deviceType,
             struct audio_config *config,
-            const char *address);
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata);
 
     virtual ~AudioStreamOut();
 
diff --git a/services/audioflinger/datapath/SpdifStreamOut.cpp b/services/audioflinger/datapath/SpdifStreamOut.cpp
index 65a4eec..d3983b0 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.cpp
+++ b/services/audioflinger/datapath/SpdifStreamOut.cpp
@@ -45,7 +45,8 @@
         audio_io_handle_t handle,
         audio_devices_t devices,
         struct audio_config *config,
-        const char *address)
+        const char *address,
+        const std::vector<playback_track_metadata_v7_t>& sourceMetadata)
 {
     struct audio_config customConfig = *config;
 
@@ -75,7 +76,8 @@
             handle,
             devices,
             &customConfig,
-            address);
+            address,
+            sourceMetadata);
 
     ALOGI("SpdifStreamOut::open() status = %d", status);
 
diff --git a/services/audioflinger/datapath/SpdifStreamOut.h b/services/audioflinger/datapath/SpdifStreamOut.h
index c6d27ba..1cd8f65 100644
--- a/services/audioflinger/datapath/SpdifStreamOut.h
+++ b/services/audioflinger/datapath/SpdifStreamOut.h
@@ -43,7 +43,8 @@
             audio_io_handle_t handle,
             audio_devices_t devices,
             struct audio_config *config,
-            const char *address) override;
+            const char *address,
+            const std::vector<playback_track_metadata_v7_t>& sourceMetadata) override;
 
     /**
     * Write audio buffer to driver. Returns number of bytes written, or a
diff --git a/services/audiopolicy/AudioPolicyInterface.h b/services/audiopolicy/AudioPolicyInterface.h
index deb7345..1bac259 100644
--- a/services/audiopolicy/AudioPolicyInterface.h
+++ b/services/audiopolicy/AudioPolicyInterface.h
@@ -477,7 +477,8 @@
                                 audio_config_base_t *mixerConfig,
                                 const sp<DeviceDescriptorBase>& device,
                                 uint32_t *latencyMs,
-                                audio_output_flags_t flags) = 0;
+                                audio_output_flags_t flags,
+                                audio_attributes_t audioAttributes) = 0;
     // creates a special output that is duplicated to the two outputs passed as arguments.
     // The duplication is performed by a special mixer thread in the AudioFlinger.
     virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1,
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
index c26ea10..0f2fe24 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioInputDescriptor.h
@@ -41,7 +41,8 @@
 {
 public:
     AudioInputDescriptor(const sp<IOProfile>& profile,
-                         AudioPolicyClientInterface *clientInterface);
+                         AudioPolicyClientInterface *clientInterface,
+                         bool isPreemptor);
 
     virtual ~AudioInputDescriptor() = default;
 
@@ -127,6 +128,8 @@
     // active use case
     void checkSuspendEffects();
 
+    bool isPreemptor() const { return mIsPreemptor; }
+
  private:
 
     void updateClientRecordingConfiguration(int event, const sp<RecordClientDescriptor>& client);
@@ -145,6 +148,7 @@
     int32_t mGlobalActiveCount = 0;  // non-client-specific activity ref count
     EffectDescriptorCollection mEnabledEffects;
     audio_input_flags_t& mFlags = AudioPortConfig::mFlags.input;
+    bool mIsPreemptor; // true if this input was opened after preempting another one
 };
 
 class AudioInputCollection :
diff --git a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
index 914f3fe..bfb28a5 100644
--- a/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
+++ b/services/audiopolicy/common/managerdefinitions/include/AudioOutputDescriptor.h
@@ -413,7 +413,8 @@
                       const DeviceVector &devices,
                       audio_stream_type_t stream,
                       audio_output_flags_t flags,
-                      audio_io_handle_t *output);
+                      audio_io_handle_t *output,
+                      audio_attributes_t attributes);
 
         // Called when a stream is about to be started
         // Note: called before setClientActive(true);
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 44f84b9..5a0fd97 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -30,9 +30,10 @@
 namespace android {
 
 AudioInputDescriptor::AudioInputDescriptor(const sp<IOProfile>& profile,
-                                           AudioPolicyClientInterface *clientInterface)
+                                           AudioPolicyClientInterface *clientInterface,
+                                           bool isPreemptor)
     : mProfile(profile)
-    ,  mClientInterface(clientInterface)
+    ,  mClientInterface(clientInterface), mIsPreemptor(isPreemptor)
 {
     if (profile != NULL) {
         profile->pickAudioProfile(mSamplingRate, mChannelMask, mFormat);
@@ -275,6 +276,9 @@
                             "%s invalid profile active count %u",
                             __func__, mProfile->curActiveCount);
         mProfile->curActiveCount--;
+        // allow preemption again now that at least one client was able to
+        // capture on this input
+        mIsPreemptor = false;
     }
 }
 
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 68b2e7b..6fef215 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -524,6 +524,14 @@
     StreamTypeVector streams = streamTypes;
     if (!AudioOutputDescriptor::setVolume(
             volumeDb, muted, vs, streamTypes, deviceTypes, delayMs, force, isVoiceVolSrc)) {
+        if (hasStream(streamTypes, AUDIO_STREAM_BLUETOOTH_SCO)) {
+            VolumeSource callVolSrc = getVoiceSource();
+            if (callVolSrc != VOLUME_SOURCE_NONE && volumeDb != getCurVolume(callVolSrc)) {
+                setCurVolume(callVolSrc, volumeDb, true);
+                mClientInterface->setStreamVolume(
+                        AUDIO_STREAM_VOICE_CALL, Volume::DbToAmpl(volumeDb), mIoHandle, delayMs);
+            }
+        }
         return false;
     }
     if (streams.empty()) {
@@ -588,7 +596,8 @@
                                        const DeviceVector &devices,
                                        audio_stream_type_t stream,
                                        audio_output_flags_t flags,
-                                       audio_io_handle_t *output)
+                                       audio_io_handle_t *output,
+                                       audio_attributes_t attributes)
 {
     mDevices = devices;
     sp<DeviceDescriptor> device = devices.getDeviceForOpening();
@@ -652,7 +661,8 @@
                                                    &lMixerConfig,
                                                    device,
                                                    &mLatency,
-                                                   mFlags);
+                                                   mFlags,
+                                                   attributes);
 
     if (status == NO_ERROR) {
         LOG_ALWAYS_FATAL_IF(*output == AUDIO_IO_HANDLE_NONE,
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index b04aff0..daef691 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -463,10 +463,13 @@
 
         // LE audio broadcast device is only used if:
         // - No call is active
-        // - either MEDIA or SONIFICATION_RESPECTFUL is the highest priority active strategy
-        //   OR the LE audio unicast device is not active
+        // - the highest priority active strategy is not PHONE or TRANSMITTED_THROUGH_SPEAKER
+        // OR the LE audio unicast device is not active
         if (devices2.isEmpty() && !isInCall()
-                && (strategy == STRATEGY_MEDIA || strategy == STRATEGY_SONIFICATION_RESPECTFUL)) {
+                // also skipping routing queries from PHONE and TRANSMITTED_THROUGH_SPEAKER here
+                // so this code is not dependent on breaks for other strategies above
+                && (strategy != STRATEGY_PHONE)
+                && (strategy != STRATEGY_TRANSMITTED_THROUGH_SPEAKER)) {
             legacy_strategy topActiveStrategy = STRATEGY_NONE;
             for (const auto &ps : getOrderedProductStrategies()) {
                 if (outputs.isStrategyActive(ps)) {
@@ -476,8 +479,8 @@
                 }
             }
 
-            if (topActiveStrategy == STRATEGY_NONE || topActiveStrategy == STRATEGY_MEDIA
-                    || topActiveStrategy == STRATEGY_SONIFICATION_RESPECTFUL
+            if ((topActiveStrategy != STRATEGY_PHONE
+                        && topActiveStrategy != STRATEGY_TRANSMITTED_THROUGH_SPEAKER)
                     || !outputs.isAnyDeviceTypeActive(getAudioDeviceOutLeAudioUnicastSet())) {
                 devices2 =
                         availableOutputDevices.getDevicesFromType(AUDIO_DEVICE_OUT_BLE_BROADCAST);
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 8b61f0b..3ac233f 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -66,6 +66,7 @@
 using android::media::audio::common::AudioDeviceAddress;
 using android::media::audio::common::AudioPortDeviceExt;
 using android::media::audio::common::AudioPortExt;
+using com::android::media::audioserver::fix_call_audio_patch;
 using content::AttributionSourceState;
 
 //FIXME: workaround for truncated touch sounds
@@ -708,8 +709,10 @@
     audio_attributes_t attr = { .source = AUDIO_SOURCE_VOICE_COMMUNICATION };
     auto txSourceDevice = mEngine->getInputDeviceForAttributes(attr);
 
-    disconnectTelephonyAudioSource(mCallRxSourceClient);
-    disconnectTelephonyAudioSource(mCallTxSourceClient);
+    if (!fix_call_audio_patch()) {
+        disconnectTelephonyAudioSource(mCallRxSourceClient);
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
+    }
 
     if (rxDevices.isEmpty()) {
         ALOGW("%s() no selected output device", __func__);
@@ -762,6 +765,9 @@
     // Use legacy routing method for voice calls via setOutputDevice() on primary output.
     // Otherwise, create two audio patches for TX and RX path.
     if (!createRxPatch) {
+        if (fix_call_audio_patch()) {
+            disconnectTelephonyAudioSource(mCallRxSourceClient);
+        }
         if (!hasPrimaryOutput()) {
             ALOGW("%s() no primary output available", __func__);
             return INVALID_OPERATION;
@@ -784,6 +790,8 @@
             }
         }
         connectTelephonyTxAudioSource(txSourceDevice, txSinkDevice, delayMs);
+    } else if (fix_call_audio_patch()) {
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
     }
     if (waitMs != nullptr) {
         *waitMs = muteWaitMs;
@@ -805,18 +813,38 @@
 
 void AudioPolicyManager::connectTelephonyRxAudioSource(uint32_t delayMs)
 {
-    disconnectTelephonyAudioSource(mCallRxSourceClient);
+    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
+
+    if (fix_call_audio_patch()) {
+        if (mCallRxSourceClient != nullptr) {
+            DeviceVector rxDevices =
+                  mEngine->getOutputDevicesForAttributes(aa, nullptr, false /*fromCache*/);
+            ALOG_ASSERT(!rxDevices.isEmpty() || !mCallRxSourceClient->isConnected(),
+                        "connectTelephonyRxAudioSource(): no device found for call RX source");
+            sp<DeviceDescriptor> rxDevice = rxDevices.itemAt(0);
+            if (mCallRxSourceClient->isConnected()
+                    && mCallRxSourceClient->sinkDevice()->equals(rxDevice)) {
+                return;
+            }
+            disconnectTelephonyAudioSource(mCallRxSourceClient);
+        }
+    } else {
+        disconnectTelephonyAudioSource(mCallRxSourceClient);
+    }
+
     const struct audio_port_config source = {
         .role = AUDIO_PORT_ROLE_SOURCE, .type = AUDIO_PORT_TYPE_DEVICE,
         .ext.device.type = AUDIO_DEVICE_IN_TELEPHONY_RX, .ext.device.address = ""
     };
-    const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
-
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
+
     status_t status = startAudioSourceInternal(&source, &aa, &portId, 0 /*uid*/,
                                        true /*internal*/, true /*isCallRx*/, delayMs);
     ALOGE_IF(status != OK, "%s: failed to start audio source (%d)", __func__, status);
     mCallRxSourceClient = mAudioSources.valueFor(portId);
+    ALOGV("%s portdID %d between source %s and sink %s", __func__, portId,
+        mCallRxSourceClient->srcDevice()->toString().c_str(),
+        mCallRxSourceClient->sinkDevice()->toString().c_str());
     ALOGE_IF(mCallRxSourceClient == nullptr,
              "%s failed to start Telephony Rx AudioSource", __func__);
 }
@@ -835,15 +863,26 @@
         const sp<DeviceDescriptor> &srcDevice, const sp<DeviceDescriptor> &sinkDevice,
         uint32_t delayMs)
 {
-    disconnectTelephonyAudioSource(mCallTxSourceClient);
     if (srcDevice == nullptr || sinkDevice == nullptr) {
         ALOGW("%s could not create patch, invalid sink and/or source device(s)", __func__);
         return;
     }
+
+    if (fix_call_audio_patch()) {
+        if (mCallTxSourceClient != nullptr) {
+            if (mCallTxSourceClient->isConnected()
+                    && mCallTxSourceClient->srcDevice()->equals(srcDevice)) {
+                return;
+            }
+            disconnectTelephonyAudioSource(mCallTxSourceClient);
+        }
+    } else {
+        disconnectTelephonyAudioSource(mCallTxSourceClient);
+    }
+
     PatchBuilder patchBuilder;
     patchBuilder.addSource(srcDevice).addSink(sinkDevice);
-    ALOGV("%s between source %s and sink %s", __func__,
-            srcDevice->toString().c_str(), sinkDevice->toString().c_str());
+
     auto callTxSourceClientPortId = PolicyAudioPort::getNextUniqueId();
     const auto aa = mEngine->getAttributesForStreamType(AUDIO_STREAM_VOICE_CALL);
 
@@ -860,6 +899,8 @@
                 mCallTxSourceClient, sinkDevice, patchBuilder.patch(), patchHandle, mUidCached,
                 delayMs);
     ALOGE_IF(status != NO_ERROR, "%s() error %d creating TX audio patch", __func__, status);
+    ALOGV("%s portdID %d between source %s and sink %s", __func__, callTxSourceClientPortId,
+        srcDevice->toString().c_str(), sinkDevice->toString().c_str());
     if (status == NO_ERROR) {
         mAudioSources.add(callTxSourceClientPortId, mCallTxSourceClient);
     }
@@ -1251,7 +1292,8 @@
 
     // FIXME: in case of RENDER policy, the output capabilities should be checked
     if ((secondaryMixes != nullptr && !secondaryMixes->empty())
-            && !audio_is_linear_pcm(config->format)) {
+            && (!audio_is_linear_pcm(config->format) ||
+                    *flags & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD)) {
         ALOGD("%s: rejecting request as secondary mixes only support pcm", __func__);
         return BAD_VALUE;
     }
@@ -1276,7 +1318,7 @@
             status = openDirectOutput(
                     *stream, session, config,
                     (audio_output_flags_t)(*flags | AUDIO_OUTPUT_FLAG_DIRECT),
-                    DeviceVector(policyMixDevice), &newOutput);
+                    DeviceVector(policyMixDevice), &newOutput, *resultAttr);
             if (status == NO_ERROR) {
                 policyDesc = mOutputs.valueFor(newOutput);
                 primaryMix->setOutput(policyDesc);
@@ -1514,7 +1556,8 @@
                                               const audio_config_t *config,
                                               audio_output_flags_t flags,
                                               const DeviceVector &devices,
-                                              audio_io_handle_t *output) {
+                                              audio_io_handle_t *output,
+                                              audio_attributes_t attributes) {
 
     *output = AUDIO_IO_HANDLE_NONE;
 
@@ -1602,7 +1645,8 @@
     releaseMsdOutputPatches(devices);
 
     status_t status =
-            outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, flags, output);
+            outputDesc->open(config, nullptr /* mixerConfig */, devices, stream, flags, output,
+                             attributes);
 
     // only accept an output with the requested parameters
     if (status != NO_ERROR ||
@@ -1708,7 +1752,9 @@
 
     audio_config_t directConfig = *config;
     directConfig.channel_mask = channelMask;
-    status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output);
+
+    status_t status = openDirectOutput(stream, session, &directConfig, *flags, devices, &output,
+                                       *attr);
     if (status != NAME_NOT_FOUND) {
         return output;
     }
@@ -3095,43 +3141,115 @@
         }
     }
 
+    bool isPreemptor = false;
     if (!profile->canOpenNewIo()) {
-        for (size_t i = 0; i < mInputs.size(); ) {
-            sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
-            if (desc->mProfile != profile) {
-                i++;
-                continue;
-            }
-            // if sound trigger, reuse input if used by other sound trigger on same session
-            // else
-            //    reuse input if active client app is not in IDLE state
-            //
-            RecordClientVector clients = desc->clientsList();
-            bool doClose = false;
-            for (const auto& client : clients) {
-                if (isSoundTrigger != client->isSoundTrigger()) {
+        if (com::android::media::audioserver::fix_input_sharing_logic()) {
+            //  First pick the best candidate for preemption (there may not be any):
+            //  - Preempt an input if:
+            //     - It has only use cases of strictly lower priority than the new client, or
+            //     - It has use cases of equal priority to the new client, and either was not
+            //       opened as the result of a preemption or has been active since it was opened.
+            //  - Order the preemption candidates by inactive first and priority second
+            sp<AudioInputDescriptor> closeCandidate;
+            int leastCloseRank = INT_MAX;
+            static const int sCloseActive = 0x100;
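+            // closeRank orders the candidates: inactive inputs (rank < sCloseActive) come
+            // before active ones, and within each group the input whose highest priority
+            // use case is lowest comes first; the candidate with the smallest rank is closed.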
+
+            for (size_t i = 0; i < mInputs.size(); i++) {
+                sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                if (desc->mProfile != profile) {
                     continue;
                 }
-                if (client->isSoundTrigger()) {
-                    if (session == client->session()) {
+                sp<RecordClientDescriptor> topPrioClient = desc->getHighestPriorityClient();
+                if (topPrioClient == nullptr) {
+                    continue;
+                }
+                int topPrio = source_priority(topPrioClient->source());
+                if (topPrio < source_priority(attributes.source)
+                      || (topPrio == source_priority(attributes.source)
+                          && !desc->isPreemptor())) {
+                    int closeRank = (desc->isActive() ? sCloseActive : 0) + topPrio;
+                    if (closeRank < leastCloseRank) {
+                        leastCloseRank = closeRank;
+                        closeCandidate = desc;
+                    }
+                }
+            }
+
+            if (closeCandidate != nullptr) {
+                closeInput(closeCandidate->mIoHandle);
+                // Mark the new input as being issued from a preemption
+                // so that it will not be preempted later
+                isPreemptor = true;
+            } else {
+                // Then pick the best reusable input (there is always one)
+                // The order of preference is:
+                // 1) active inputs with same use case as the new client
+                // 2) inactive inputs with same use case
+                // 3) active inputs with different use cases
+                // 4) inactive inputs with different use cases
+                sp<AudioInputDescriptor> reuseCandidate;
+                int leastReuseRank = INT_MAX;
+                static const int sReuseDifferentUseCase = 0x100;
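+                // reuseRank encodes the preference order above: 0 = active with the same use
+                // case, 1 = inactive with the same use case, 0x100 = active with a different
+                // use case, 0x101 = inactive with a different use case; the smallest rank wins.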
+
+                for (size_t i = 0; i < mInputs.size(); i++) {
+                    sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                    if (desc->mProfile != profile) {
+                        continue;
+                    }
+                    int reuseRank = sReuseDifferentUseCase;
+                    for (const auto& client: desc->getClientIterable()) {
+                        if (client->source() == attributes.source) {
+                            reuseRank = 0;
+                            break;
+                        }
+                    }
+                    reuseRank += desc->isActive() ? 0 : 1;
+                    if (reuseRank < leastReuseRank) {
+                        leastReuseRank = reuseRank;
+                        reuseCandidate = desc;
+                    }
+                }
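+                // canOpenNewIo() returned false, so at least one input is open on this profile
+                // and reuseCandidate is guaranteed to be non null here.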
+                return reuseCandidate->mIoHandle;
+            }
+        } else { // fix_input_sharing_logic()
+            for (size_t i = 0; i < mInputs.size(); ) {
+                sp<AudioInputDescriptor> desc = mInputs.valueAt(i);
+                if (desc->mProfile != profile) {
+                    i++;
+                    continue;
+                }
+                // if sound trigger, reuse input if used by other sound trigger on same session
+                // else
+                //    reuse input if active client app is not in IDLE state
+                //
+                RecordClientVector clients = desc->clientsList();
+                bool doClose = false;
+                for (const auto& client : clients) {
+                    if (isSoundTrigger != client->isSoundTrigger()) {
+                        continue;
+                    }
+                    if (client->isSoundTrigger()) {
+                        if (session == client->session()) {
+                            return desc->mIoHandle;
+                        }
+                        continue;
+                    }
+                    if (client->active() && client->appState() != APP_STATE_IDLE) {
                         return desc->mIoHandle;
                     }
-                    continue;
+                    doClose = true;
                 }
-                if (client->active() && client->appState() != APP_STATE_IDLE) {
-                    return desc->mIoHandle;
+                if (doClose) {
+                    closeInput(desc->mIoHandle);
+                } else {
+                    i++;
                 }
-                doClose = true;
-            }
-            if (doClose) {
-                closeInput(desc->mIoHandle);
-            } else {
-                i++;
             }
         }
     }
 
-    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(profile, mpClientInterface);
+    sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(
+            profile, mpClientInterface, isPreemptor);
 
     audio_config_t lConfig = AUDIO_CONFIG_INITIALIZER;
     lConfig.sample_rate = profileSamplingRate;
@@ -3394,6 +3512,11 @@
                                                             bool enabled,
                                                             audio_stream_type_t streamToDriveAbs)
 {
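+    // Disabling only requires dropping the entry for this device: there is no need to resolve
+    // the driving stream's attributes in that case.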
+    if (!enabled) {
+        mAbsoluteVolumeDrivingStreams.erase(deviceType);
+        return NO_ERROR;
+    }
+
     audio_attributes_t attributesToDriveAbs = mEngine->getAttributesForStreamType(streamToDriveAbs);
     if (attributesToDriveAbs == AUDIO_ATTRIBUTES_INITIALIZER) {
         ALOGW("%s: no attributes for stream %s, bailing out", __func__,
@@ -3401,12 +3524,7 @@
         return BAD_VALUE;
     }
 
-    if (enabled) {
-        mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
-    } else {
-        mAbsoluteVolumeDrivingStreams.erase(deviceType);
-    }
-
+    mAbsoluteVolumeDrivingStreams[deviceType] = attributesToDriveAbs;
     return NO_ERROR;
 }
 
@@ -5916,7 +6034,8 @@
 float AudioPolicyManager::getStreamVolumeDB(
         audio_stream_type_t stream, int index, audio_devices_t device)
 {
-    return computeVolume(getVolumeCurves(stream), toVolumeSource(stream), index, {device});
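+    // adjustAttenuation is false so the reported value comes straight from the volume curve,
+    // without the absolute volume adjustment.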
+    return computeVolume(getVolumeCurves(stream), toVolumeSource(stream), index,
+                         {device}, /* adjustAttenuation= */false);
 }
 
 status_t AudioPolicyManager::getSurroundFormats(unsigned int *numSurroundFormats,
@@ -6244,10 +6363,11 @@
     // mode is not requested.
 
     if (config != nullptr && *config != AUDIO_CONFIG_INITIALIZER) {
-        static const bool stereo_spatialization_enabled =
+        static const bool stereo_spatialization_prop_enabled =
                 property_get_bool("ro.audio.stereo_spatialization_enabled", false);
         const bool channel_mask_spatialized =
-                (stereo_spatialization_enabled && com_android_media_audio_stereo_spatialization())
+                (stereo_spatialization_prop_enabled
+                        && com_android_media_audio_stereo_spatialization())
                 ? audio_channel_mask_contains_stereo(config->channel_mask)
                 : audio_is_channel_mask_spatialized(config->channel_mask);
         if (!channel_mask_spatialized) {
@@ -6598,10 +6718,11 @@
             sp<SwAudioOutputDescriptor> outputDesc = new SwAudioOutputDescriptor(outProfile,
                                                                                  mpClientInterface);
             audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+            audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
             status_t status = outputDesc->open(nullptr /* halConfig */, nullptr /* mixerConfig */,
                                                DeviceVector(supportedDevice),
                                                AUDIO_STREAM_DEFAULT,
-                                               AUDIO_OUTPUT_FLAG_NONE, &output);
+                                               AUDIO_OUTPUT_FLAG_NONE, &output, attributes);
             if (status != NO_ERROR) {
                 ALOGW("Cannot open output stream for devices %s on hw module %s",
                       supportedDevice->toString().c_str(), hwModule->getName());
@@ -6662,8 +6783,8 @@
                 continue;
             }
 
-            sp<AudioInputDescriptor> inputDesc =
-                    new AudioInputDescriptor(inProfile, mpClientInterface);
+            sp<AudioInputDescriptor> inputDesc = new AudioInputDescriptor(
+                    inProfile, mpClientInterface, false /*isPreemptor*/);
 
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
             status_t status = inputDesc->open(nullptr,
@@ -6981,7 +7102,7 @@
                 continue;
             }
 
-            desc = new AudioInputDescriptor(profile, mpClientInterface);
+            desc = new AudioInputDescriptor(profile, mpClientInterface, false /*isPreemptor*/);
             audio_io_handle_t input = AUDIO_IO_HANDLE_NONE;
             ALOGV("%s opening input for profile %s", __func__, profile->getTagName().c_str());
             status = desc->open(nullptr, device, AUDIO_SOURCE_MIC,
@@ -7404,7 +7525,8 @@
                     client->getSecondaryOutputs().begin(),
                     client->getSecondaryOutputs().end(),
                     secondaryDescs.begin(), secondaryDescs.end())) {
-                if (!audio_is_linear_pcm(client->config().format)) {
+                if (client->flags() & AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD
+                        || !audio_is_linear_pcm(client->config().format)) {
                     // If the format is not PCM, the tracks should be invalidated to get correct
                     // behavior when the secondary output is changed.
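+                    // Compressed offload tracks are invalidated as well since secondary mixes
+                    // do not support compressed offload.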
                     clientsToInvalidate.push_back(client->portId());
@@ -8100,7 +8222,7 @@
             VolumeSource vsToDriveAbs = toVolumeSource(groupToDriveAbs);
             if (vsToDriveAbs == volumeSource) {
                 // attenuation is applied by the abs volume controller
-                return volumeDbMax;
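+                // except for index 0, where the locally computed volume is returned so that
+                // the stream is still attenuated/muted locally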
+                return (index != 0) ? volumeDbMax : volumeDb;
             } else {
                 IVolumeCurves &curvesAbs = getVolumeCurves(vsToDriveAbs);
                 int indexAbs = curvesAbs.getVolumeIndex({volumeDevice});
@@ -8124,9 +8246,15 @@
                                         VolumeSource volumeSource,
                                         int index,
                                         const DeviceTypeSet& deviceTypes,
+                                        bool adjustAttenuation,
                                         bool computeInternalInteraction)
 {
-    float volumeDb = adjustDeviceAttenuationForAbsVolume(curves, volumeSource, index, deviceTypes);
+    float volumeDb;
+    if (adjustAttenuation) {
+        volumeDb = adjustDeviceAttenuationForAbsVolume(curves, volumeSource, index, deviceTypes);
+    } else {
+        volumeDb = curves.volIndexToDb(Volume::getDeviceCategory(deviceTypes), index);
+    }
     ALOGV("%s volume source %d, index %d,  devices %s, compute internal %b ", __func__,
           volumeSource, index, dumpDeviceTypes(deviceTypes).c_str(), computeInternalInteraction);
 
@@ -8147,7 +8275,8 @@
             mOutputs.isActive(ringVolumeSrc, 0)) {
         auto &ringCurves = getVolumeCurves(AUDIO_STREAM_RING);
         const float ringVolumeDb = computeVolume(ringCurves, ringVolumeSrc, index, deviceTypes,
-                /* computeInternalInteraction= */ false);
+                                                 adjustAttenuation,
+                                                 /* computeInternalInteraction= */false);
         return ringVolumeDb - 4 > volumeDb ? ringVolumeDb - 4 : volumeDb;
     }
 
@@ -8165,7 +8294,7 @@
         int voiceVolumeIndex = voiceCurves.getVolumeIndex(deviceTypes);
         const float maxVoiceVolDb =
                 computeVolume(voiceCurves, callVolumeSrc, voiceVolumeIndex, deviceTypes,
-                              /* computeInternalInteraction= */ false)
+                        adjustAttenuation, /* computeInternalInteraction= */false)
                 + IN_CALL_EARPIECE_HEADROOM_DB;
         // FIXME: Workaround for call screening applications until a proper audio mode is defined
         // to support this scenario : Exempt the RING stream from the audio cap if the audio was
@@ -8218,7 +8347,8 @@
                                              musicVolumeSrc,
                                              musicCurves.getVolumeIndex(musicDevice),
                                              musicDevice,
-                                             /* computeInternalInteraction= */ false);
+                                              adjustAttenuation,
+                                              /* computeInternalInteraction= */ false);
             float minVolDb = (musicVolDb > SONIFICATION_HEADSET_VOLUME_MIN_DB) ?
                         musicVolDb : SONIFICATION_HEADSET_VOLUME_MIN_DB;
             if (volumeDb > minVolDb) {
@@ -8352,11 +8482,19 @@
                                                    bool& isBtScoVolSrc,
                                                    const char* caller) {
     const VolumeSource callVolSrc = toVolumeSource(AUDIO_STREAM_VOICE_CALL, false);
-    const VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
+    isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+
     const bool isScoRequested = isScoRequestedForComm();
     const bool isHAUsed = isHearingAidUsedForComm();
 
-    isVoiceVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource);
+    if (com_android_media_audio_replace_stream_bt_sco()) {
+        ALOGV("%s stream bt sco is replaced, no volume consistency check for calls", __func__);
+        isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (callVolSrc == volumeSource) &&
+                        (isScoRequested || isHAUsed);
+        return true;
+    }
+
+    const VolumeSource btScoVolSrc = toVolumeSource(AUDIO_STREAM_BLUETOOTH_SCO, false);
     isBtScoVolSrc = (volumeSource != VOLUME_SOURCE_NONE) && (btScoVolSrc == volumeSource);
 
     if ((callVolSrc != btScoVolSrc) &&
@@ -8799,8 +8937,9 @@
     }
     sp<SwAudioOutputDescriptor> desc = new SwAudioOutputDescriptor(profile, mpClientInterface);
     audio_io_handle_t output = AUDIO_IO_HANDLE_NONE;
+    audio_attributes_t attributes = AUDIO_ATTRIBUTES_INITIALIZER;
     status_t status = desc->open(halConfig, mixerConfig, devices,
-            AUDIO_STREAM_DEFAULT, flags, &output);
+            AUDIO_STREAM_DEFAULT, flags, &output, attributes);
     if (status != NO_ERROR) {
         ALOGE("%s failed to open output %d", __func__, status);
         return nullptr;
@@ -8838,7 +8977,8 @@
         config.offload_info.channel_mask = config.channel_mask;
         config.offload_info.format = config.format;
 
-        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output);
+        status = desc->open(&config, mixerConfig, devices, AUDIO_STREAM_DEFAULT, flags, &output,
+                            attributes);
         if (status != NO_ERROR) {
             return nullptr;
         }
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.h b/services/audiopolicy/managerdefault/AudioPolicyManager.h
index 1b4f33a..c0a5012 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.h
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.h
@@ -590,6 +590,8 @@
          * @param index index to match in the volume curves for the calculation
          * @param deviceTypes devices that should be considered in the volume curves for the
          *        calculation
+         * @param adjustAttenuation boolean indicating whether the value should be adjusted to
+         *        avoid double attenuation when controlling an AVRCP device
          * @param computeInternalInteraction boolean indicating whether recursive volume computation
          *        should continue within the volume computation. Defaults to {@code true} so the
          *        volume interactions can be computed. Calls within the method should always set the
@@ -598,6 +600,7 @@
          */
         virtual float computeVolume(IVolumeCurves &curves, VolumeSource volumeSource,
                                int index, const DeviceTypeSet& deviceTypes,
+                               bool adjustAttenuation = true,
                                bool computeInternalInteraction = true);
 
         // rescale volume index from srcStream within range of dstStream
@@ -1152,7 +1155,8 @@
                 const audio_config_t *config,
                 audio_output_flags_t flags,
                 const DeviceVector &devices,
-                audio_io_handle_t *output);
+                audio_io_handle_t *output,
+                audio_attributes_t attributes);
 
         /**
          * @brief Queries if some kind of spatialization will be performed if the audio playback
diff --git a/services/audiopolicy/permission/Android.bp b/services/audiopolicy/permission/Android.bp
index d5f59a0..cfbeaae 100644
--- a/services/audiopolicy/permission/Android.bp
+++ b/services/audiopolicy/permission/Android.bp
@@ -34,8 +34,8 @@
     shared_libs: [
         "libbase",
         "libbinder",
-        "libutils",
         "liblog",
+        "libutils",
     ],
 
     host_supported: true,
@@ -43,21 +43,21 @@
         integer_overflow: true,
     },
     cflags: [
-        "-Wall",
-        "-Wdeprecated",
-        "-Wextra",
-        "-Werror=format",
-        "-Wextra-semi",
-        "-Wthread-safety",
-        "-Wconditional-uninitialized",
-        "-Wimplicit-fallthrough",
-        "-Wreorder-init-list",
-        "-Werror=reorder-init-list",
-        "-Wshadow-all",
-        "-Wunreachable-code-aggressive",
-        "-Werror",
         "-DANDROID_BASE_UNIQUE_FD_DISABLE_IMPLICIT_CONVERSION",
         "-DANDROID_UTILS_REF_BASE_DISABLE_IMPLICIT_CONSTRUCTION",
+        "-Wall",
+        "-Wconditional-uninitialized",
+        "-Wdeprecated",
+        "-Werror",
+        "-Werror=format",
+        "-Werror=reorder-init-list",
+        "-Wextra",
+        "-Wextra-semi",
+        "-Wimplicit-fallthrough",
+        "-Wreorder-init-list",
+        "-Wshadow-all",
+        "-Wthread-safety",
+        "-Wunreachable-code-aggressive",
     ],
     tidy: true,
     tidy_checks: [
diff --git a/services/audiopolicy/permission/NativePermissionController.cpp b/services/audiopolicy/permission/NativePermissionController.cpp
index 8659f2c..5743076 100644
--- a/services/audiopolicy/permission/NativePermissionController.cpp
+++ b/services/audiopolicy/permission/NativePermissionController.cpp
@@ -44,11 +44,6 @@
             return "audioserver";
         case AID_CAMERASERVER:
             return "cameraserver";
-        // These packages are not handled by AppOps, but labeling may be useful for us
-        case AID_RADIO:
-            return "telephony";
-        case AID_BLUETOOTH:
-            return "bluetooth";
         default:
             return std::nullopt;
     }
@@ -129,10 +124,12 @@
 
 BinderResult<bool> NativePermissionController::validateUidPackagePair(
         uid_t uid, const std::string& packageName) const {
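+    // Root and system uids are always accepted, regardless of the package name supplied.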
+    if (uid == AID_ROOT || uid == AID_SYSTEM) return true;
     uid = uid % AID_USER_OFFSET;
     const auto fixed_package_opt = getFixedPackageName(uid);
     if (fixed_package_opt.has_value()) {
-        return packageName == fixed_package_opt.value();
+        return (uid == AID_ROOT || uid == AID_SYSTEM) ? true :
+                packageName == fixed_package_opt.value();
     }
     std::lock_guard l{m_};
     if (!is_package_populated_) {
@@ -148,6 +145,7 @@
 
 BinderResult<bool> NativePermissionController::checkPermission(PermissionEnum perm,
                                                                uid_t uid) const {
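+    // Permission checks trivially pass for root, system and the service's own uid.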
+    if (uid == AID_ROOT || uid == AID_SYSTEM || uid == getuid()) return true;
     std::lock_guard l{m_};
     const auto& uids = permission_map_[static_cast<size_t>(perm)];
     if (!uids.empty()) {
diff --git a/services/audiopolicy/service/AudioPolicyClientImpl.cpp b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
index f70dc52..22fc151 100644
--- a/services/audiopolicy/service/AudioPolicyClientImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyClientImpl.cpp
@@ -56,7 +56,8 @@
                                                            audio_config_base_t *mixerConfig,
                                                            const sp<DeviceDescriptorBase>& device,
                                                            uint32_t *latencyMs,
-                                                           audio_output_flags_t flags)
+                                                           audio_output_flags_t flags,
+                                                           audio_attributes_t attributes)
 {
     sp<IAudioFlinger> af = AudioSystem::get_audio_flinger();
     if (af == 0) {
@@ -74,6 +75,8 @@
             legacy2aidl_audio_config_base_t_AudioConfigBase(*mixerConfig, false /*isInput*/));
     request.device = VALUE_OR_RETURN_STATUS(legacy2aidl_DeviceDescriptorBase(device));
     request.flags = VALUE_OR_RETURN_STATUS(legacy2aidl_audio_output_flags_t_int32_t_mask(flags));
+    request.attributes = VALUE_OR_RETURN_STATUS(
+            legacy2aidl_audio_attributes_t_AudioAttributes(attributes));
 
     status_t status = af->openOutput(request, &response);
     if (status == OK) {
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index 768cd07..e598897 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -23,6 +23,8 @@
 
 #include <android/content/AttributionSourceState.h>
 #include <android_media_audiopolicy.h>
+#include <com_android_media_audio.h>
+#include <error/expected_utils.h>
 #include <media/AidlConversion.h>
 #include <media/AudioPolicy.h>
 #include <media/AudioValidator.h>
@@ -44,13 +46,30 @@
         if (!_tmp.isOk()) return _tmp; \
     }
 
+// Yields the boolean result of the permission query; if the permission provider call itself
+// fails, VALUE_OR_RETURN_STATUS returns the error from the enclosing method.
+#define CHECK_PERM(expr1, expr2) \
+    VALUE_OR_RETURN_STATUS(getPermissionProvider().checkPermission((expr1), (expr2)))
+
 #define MAX_ITEMS_PER_LIST 1024
 
 namespace android {
 namespace audiopolicy_flags = android::media::audiopolicy;
 using binder::Status;
 using aidl_utils::binderStatusFromStatusT;
+using com::android::media::audio::audioserver_permissions;
 using com::android::media::permission::NativePermissionController;
+using com::android::media::permission::PermissionEnum::ACCESS_ULTRASOUND;
+using com::android::media::permission::PermissionEnum::CALL_AUDIO_INTERCEPTION;
+using com::android::media::permission::PermissionEnum::CAPTURE_AUDIO_HOTWORD;
+using com::android::media::permission::PermissionEnum::CAPTURE_AUDIO_OUTPUT;
+using com::android::media::permission::PermissionEnum::CAPTURE_MEDIA_OUTPUT;
+using com::android::media::permission::PermissionEnum::CAPTURE_TUNER_AUDIO_INPUT;
+using com::android::media::permission::PermissionEnum::CAPTURE_VOICE_COMMUNICATION_OUTPUT;
+using com::android::media::permission::PermissionEnum::MODIFY_AUDIO_ROUTING;
+using com::android::media::permission::PermissionEnum::MODIFY_AUDIO_SETTINGS;
+using com::android::media::permission::PermissionEnum::MODIFY_DEFAULT_AUDIO_EFFECTS;
+using com::android::media::permission::PermissionEnum::MODIFY_PHONE_STATE;
+using com::android::media::permission::PermissionEnum::RECORD_AUDIO;
+using com::android::media::permission::PermissionEnum::WRITE_SECURE_SETTINGS;
 using content::AttributionSourceState;
 using media::audio::common::AudioConfig;
 using media::audio::common::AudioConfigBase;
@@ -86,31 +105,37 @@
         != std::end(mSupportedSystemUsages);
 }
 
-status_t AudioPolicyService::validateUsage(const audio_attributes_t& attr) {
+Status AudioPolicyService::validateUsage(const audio_attributes_t& attr) {
      return validateUsage(attr, getCallingAttributionSource());
 }
 
-status_t AudioPolicyService::validateUsage(const audio_attributes_t& attr,
+Status AudioPolicyService::validateUsage(const audio_attributes_t& attr,
         const AttributionSourceState& attributionSource) {
     if (isSystemUsage(attr.usage)) {
         if (isSupportedSystemUsage(attr.usage)) {
             if (attr.usage == AUDIO_USAGE_CALL_ASSISTANT
                     && ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)) {
-                if (!callAudioInterceptionAllowed(attributionSource)) {
+                if (!(audioserver_permissions() ?
+                            CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
+                            : callAudioInterceptionAllowed(attributionSource))) {
                     ALOGE("%s: call audio interception not allowed for attribution source: %s",
                            __func__, attributionSource.toString().c_str());
-                    return PERMISSION_DENIED;
+                    return Status::fromExceptionCode(Status::EX_SECURITY,
+                            "Call audio interception not allowed");
                 }
-            } else if (!modifyAudioRoutingAllowed(attributionSource)) {
+            } else if (!(audioserver_permissions() ?
+                        CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid)
+                        : modifyAudioRoutingAllowed(attributionSource))) {
                 ALOGE("%s: modify audio routing not allowed for attribution source: %s",
                         __func__, attributionSource.toString().c_str());
-                return PERMISSION_DENIED;
+                return Status::fromExceptionCode(Status::EX_SECURITY,
+                        "Modify audio routing not allowed");
             }
         } else {
-            return BAD_VALUE;
+            return Status::fromExceptionCode(Status::EX_ILLEGAL_ARGUMENT);
         }
     }
-    return NO_ERROR;
+    return Status::ok();
 }
 
 
@@ -137,7 +162,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (state != AUDIO_POLICY_DEVICE_STATE_AVAILABLE &&
@@ -191,7 +218,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -215,7 +244,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(state) >= AUDIO_MODE_CNT) {
@@ -265,7 +296,9 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    if (!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+            : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -355,7 +388,7 @@
 
     RETURN_IF_BINDER_ERROR(
             binderStatusFromStatusT(AudioValidator::validateAudioAttributes(attr, "68953950")));
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr, attributionSource)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attr, attributionSource));
 
     ALOGV("%s()", __func__);
     audio_utils::lock_guard _l(mMutex);
@@ -364,14 +397,22 @@
         aidl2legacy_int32_t_uid_t(attributionSource.uid)))) {
         attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
     }
+    const bool bypassInterruptionAllowed = audioserver_permissions() ? (
+            CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid) ||
+            CHECK_PERM(MODIFY_PHONE_STATE, attributionSource.uid) ||
+            CHECK_PERM(WRITE_SECURE_SETTINGS, attributionSource.uid))
+            : bypassInterruptionPolicyAllowed(attributionSource);
+
     if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
-            && !bypassInterruptionPolicyAllowed(attributionSource)) {
+            && !bypassInterruptionAllowed) {
         attr.flags = static_cast<audio_flags_mask_t>(
                 attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
 
     if (attr.content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(attributionSource)) {
+        if (!(audioserver_permissions() ?
+                CHECK_PERM(ACCESS_ULTRASOUND, attributionSource.uid)
+                : accessUltrasoundAllowed(attributionSource))) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
                     __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -400,18 +441,24 @@
             break;
         case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
             if (((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)
-                && !callAudioInterceptionAllowed(attributionSource)) {
+                && !(audioserver_permissions() ?
+                        CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
+                : callAudioInterceptionAllowed(attributionSource))) {
                 ALOGE("%s() permission denied: call redirection not allowed for uid %d",
                     __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
-            } else if (!modifyPhoneStateAllowed(attributionSource)) {
+            } else if (!(audioserver_permissions() ?
+                        CHECK_PERM(MODIFY_PHONE_STATE, attributionSource.uid)
+                    : modifyPhoneStateAllowed(attributionSource))) {
                 ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
                     __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
         case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
-            if (!modifyAudioRoutingAllowed(attributionSource)) {
+            if (!(audioserver_permissions() ?
+                        CHECK_PERM(MODIFY_AUDIO_ROUTING, attributionSource.uid)
+                    : modifyAudioRoutingAllowed(attributionSource))) {
                 ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
                     __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
@@ -430,7 +477,7 @@
 
         sp<AudioPlaybackClient> client =
                 new AudioPlaybackClient(attr, output, attributionSource, session,
-                    portId, selectedDeviceId, stream, isSpatialized);
+                    portId, selectedDeviceId, stream, isSpatialized, config.channel_mask);
         mAudioPlaybackClients.add(portId, client);
 
         _aidl_return->output = VALUE_OR_RETURN_BINDER_STATUS(
@@ -630,8 +677,7 @@
         return binderStatusFromStatusT(BAD_VALUE);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
-            attributionSource)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attr, attributionSource));
 
     uint32_t virtualDeviceId = kDefaultVirtualDeviceId;
 
@@ -644,7 +690,10 @@
     // type is API_INPUT_MIX_EXT_POLICY_REROUTE and by AudioService if a media projection
     // is used and input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK
     // - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed()
-    if (!(recordingAllowed(attributionSource, inputSource)
+    const auto isRecordingAllowed = audioserver_permissions() ?
+            CHECK_PERM(RECORD_AUDIO, attributionSource.uid) :
+            recordingAllowed(attributionSource, inputSource);
+    if (!(isRecordingAllowed
             || inputSource == AUDIO_SOURCE_FM_TUNER
             || inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
             || inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
@@ -653,8 +702,12 @@
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureOutput = captureAudioOutputAllowed(attributionSource);
-    bool canInterceptCallAudio = callAudioInterceptionAllowed(attributionSource);
+    bool canCaptureOutput = audioserver_permissions() ?
+                        CHECK_PERM(CAPTURE_AUDIO_OUTPUT, attributionSource.uid)
+                        : captureAudioOutputAllowed(attributionSource);
+    bool canInterceptCallAudio = audioserver_permissions() ?
+                        CHECK_PERM(CALL_AUDIO_INTERCEPTION, attributionSource.uid)
+                        : callAudioInterceptionAllowed(attributionSource);
     bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
              || inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
              || inputSource == AUDIO_SOURCE_VOICE_CALL;
@@ -668,11 +721,15 @@
     }
     if (inputSource == AUDIO_SOURCE_FM_TUNER
         && !canCaptureOutput
-        && !captureTunerAudioInputAllowed(attributionSource)) {
+        && !(audioserver_permissions() ?
+                        CHECK_PERM(CAPTURE_TUNER_AUDIO_INPUT, attributionSource.uid)
+            : captureTunerAudioInputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureHotword = captureHotwordAllowed(attributionSource);
+    bool canCaptureHotword = audioserver_permissions() ?
+                        CHECK_PERM(CAPTURE_AUDIO_HOTWORD, attributionSource.uid)
+                        : captureHotwordAllowed(attributionSource);
     if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -687,7 +744,9 @@
     }
 
     if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(attributionSource)) {
+        if (!(audioserver_permissions() ?
+                CHECK_PERM(ACCESS_ULTRASOUND, attributionSource.uid)
+                : accessUltrasoundAllowed(attributionSource))) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
                     __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
@@ -733,14 +792,29 @@
                     status = PERMISSION_DENIED;
                 }
                 break;
-            case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
-                if (!(modifyAudioRoutingAllowed(attributionSource)
+            case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE: {
+                bool modAudioRoutingAllowed;
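+                // The provider is queried directly (not through CHECK_PERM) so that a provider
+                // error can be converted to a status_t and reported through the existing error
+                // path instead of returning early.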
+                if (audioserver_permissions()) {
+                    auto result = getPermissionProvider().checkPermission(
+                            MODIFY_AUDIO_ROUTING, attributionSource.uid);
+                    if (!result.ok()) {
+                        ALOGE("%s permission provider error: %s", __func__,
+                                result.error().toString8().c_str());
+                        status = aidl_utils::statusTFromBinderStatus(result.error());
+                        break;
+                    }
+                    modAudioRoutingAllowed = result.value();
+                } else {
+                    modAudioRoutingAllowed = modifyAudioRoutingAllowed(attributionSource);
+                }
+                if (!(modAudioRoutingAllowed
                         || ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
                             && canInterceptCallAudio))) {
                     ALOGE("%s permission denied for remote submix capture", __func__);
                     status = PERMISSION_DENIED;
                 }
                 break;
+            }
             case AudioPolicyInterface::API_INPUT_INVALID:
             default:
                 LOG_ALWAYS_FATAL("%s encountered an invalid input type %d",
@@ -1035,7 +1109,9 @@
     if (mAudioPolicyManager == nullptr) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(streamToDriveAbs) >= AUDIO_STREAM_PUBLIC_CNT) {
@@ -1059,7 +1135,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
@@ -1083,7 +1161,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (uint32_t(stream) >= AUDIO_STREAM_PUBLIC_CNT) {
@@ -1133,7 +1213,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     audio_utils::lock_guard _l(mMutex);
@@ -1439,7 +1521,9 @@
 
     sp<AudioPolicyEffects>audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(audioPolicyEffects->addSourceDefaultEffect(
@@ -1465,7 +1549,9 @@
 
     sp<AudioPolicyEffects> audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(audioPolicyEffects->addStreamDefaultEffect(
@@ -1480,7 +1566,9 @@
             aidl2legacy_int32_t_audio_unique_id_t(idAidl));
     sp<AudioPolicyEffects>audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     return binderStatusFromStatusT(audioPolicyEffects->removeSourceDefaultEffect(id));
@@ -1492,7 +1580,9 @@
             aidl2legacy_int32_t_audio_unique_id_t(idAidl));
     sp<AudioPolicyEffects>audioPolicyEffects;
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(getAudioPolicyEffects(audioPolicyEffects)));
-    if (!modifyDefaultAudioEffectsAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_DEFAULT_AUDIO_EFFECTS, IPCThreadState::self()->getCallingUid())
+                : modifyDefaultAudioEffectsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     return binderStatusFromStatusT(audioPolicyEffects->removeStreamDefaultEffect(id));
@@ -1510,7 +1600,9 @@
                          std::back_inserter(systemUsages), aidl2legacy_AudioUsage_audio_usage_t)));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1569,7 +1661,7 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attributes));
 
     audio_utils::lock_guard _l(mMutex);
     *_aidl_return = mAudioPolicyManager->isDirectOutputSupported(config, attributes);
@@ -1644,7 +1736,9 @@
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(AudioValidator::validateAudioPatch(patch)));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1663,7 +1757,9 @@
     audio_patch_handle_t handle = VALUE_OR_RETURN_BINDER_STATUS(
             aidl2legacy_int32_t_audio_patch_handle_t(handleAidl));
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1711,7 +1807,9 @@
             binderStatusFromStatusT(AudioValidator::validateAudioPortConfig(config)));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1774,7 +1872,9 @@
     // loopback|render only need a MediaProjection (checked in caller AudioService.java)
     bool needModifyAudioRouting = std::any_of(mixes.begin(), mixes.end(), [](auto& mix) {
             return !is_mix_loopback_render(mix.mRouteFlags); });
-    if (needModifyAudioRouting && !modifyAudioRoutingAllowed()) {
+    if (needModifyAudioRouting && !(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1790,12 +1890,16 @@
     const AttributionSourceState attributionSource = getCallingAttributionSource();
 
 
-    if (needCaptureMediaOutput && !captureMediaOutputAllowed(attributionSource)) {
+    if (needCaptureMediaOutput && !(audioserver_permissions() ?
+                CHECK_PERM(CAPTURE_MEDIA_OUTPUT, attributionSource.uid)
+                : captureMediaOutputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (needCaptureVoiceCommunicationOutput &&
-        !captureVoiceCommunicationOutputAllowed(attributionSource)) {
+        !(audioserver_permissions() ?
+                CHECK_PERM(CAPTURE_VOICE_COMMUNICATION_OUTPUT, attributionSource.uid)
+                : captureVoiceCommunicationOutputAllowed(attributionSource))) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
@@ -1852,7 +1956,9 @@
                                                         aidl2legacy_AudioDeviceTypeAddress));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1866,7 +1972,9 @@
     uid_t uid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(uidAidl));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1885,7 +1993,9 @@
                                                         aidl2legacy_AudioDeviceTypeAddress));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+                CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+                : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1899,7 +2009,9 @@
     int userId = VALUE_OR_RETURN_BINDER_STATUS(convertReinterpret<int>(userIdAidl));
 
     audio_utils::lock_guard _l(mMutex);
-    if(!modifyAudioRoutingAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_ROUTING, IPCThreadState::self()->getCallingUid())
+            : modifyAudioRoutingAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     if (mAudioPolicyManager == NULL) {
@@ -1927,7 +2039,7 @@
         return binderStatusFromStatusT(NO_INIT);
     }
 
-    RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attributes)));
+    RETURN_IF_BINDER_ERROR(validateUsage(attributes));
 
     // startAudioSource should be created as the calling uid
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
@@ -1956,7 +2068,9 @@
     if (mAudioPolicyManager == NULL) {
         return binderStatusFromStatusT(NO_INIT);
     }
-    if (!settingsAllowed()) {
+    if (!(audioserver_permissions() ?
+            CHECK_PERM(MODIFY_AUDIO_SETTINGS, IPCThreadState::self()->getCallingUid())
+            : settingsAllowed())) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
     audio_utils::lock_guard _l(mMutex);
diff --git a/services/audiopolicy/service/AudioPolicyService.cpp b/services/audiopolicy/service/AudioPolicyService.cpp
index 6144f8c..cbc0b41 100644
--- a/services/audiopolicy/service/AudioPolicyService.cpp
+++ b/services/audiopolicy/service/AudioPolicyService.cpp
@@ -590,12 +590,13 @@
             if (status == NO_ERROR && currentOutput == newOutput) {
                 return;
             }
-            size_t numActiveTracks = countActiveClientsOnOutput_l(newOutput);
+            std::vector<audio_channel_mask_t> activeTracksMasks =
+                    getActiveTracksMasks_l(newOutput);
             mMutex.unlock();
             // It is OK to call detachOutput() if none is already attached.
             mSpatializer->detachOutput();
             if (status == NO_ERROR && newOutput != AUDIO_IO_HANDLE_NONE) {
-                status = mSpatializer->attachOutput(newOutput, numActiveTracks);
+                status = mSpatializer->attachOutput(newOutput, activeTracksMasks);
             }
             mMutex.lock();
             if (status != NO_ERROR) {
@@ -613,17 +614,17 @@
     }
 }
 
-size_t AudioPolicyService::countActiveClientsOnOutput_l(
+std::vector<audio_channel_mask_t> AudioPolicyService::getActiveTracksMasks_l(
         audio_io_handle_t output, bool spatializedOnly) {
-    size_t count = 0;
+    std::vector<audio_channel_mask_t> activeTrackMasks;
     for (size_t i = 0; i < mAudioPlaybackClients.size(); i++) {
         auto client = mAudioPlaybackClients.valueAt(i);
         if (client->io == output && client->active
                 && (!spatializedOnly || client->isSpatialized)) {
-            count++;
+            activeTrackMasks.push_back(client->channelMask);
         }
     }
-    return count;
+    return activeTrackMasks;
 }
 
 void AudioPolicyService::onUpdateActiveSpatializerTracks_l() {
@@ -639,12 +640,12 @@
         return;
     }
     audio_io_handle_t output = mSpatializer->getOutput();
-    size_t activeClients;
+    std::vector<audio_channel_mask_t> activeTracksMasks;
     {
         audio_utils::lock_guard _l(mMutex);
-        activeClients = countActiveClientsOnOutput_l(output);
+        activeTracksMasks = getActiveTracksMasks_l(output);
     }
-    mSpatializer->updateActiveTracks(activeClients);
+    mSpatializer->updateActiveTracks(activeTracksMasks);
 }
 
 status_t AudioPolicyService::clientCreateAudioPatch(const struct audio_patch *patch,
diff --git a/services/audiopolicy/service/AudioPolicyService.h b/services/audiopolicy/service/AudioPolicyService.h
index 699cacf..92c162f 100644
--- a/services/audiopolicy/service/AudioPolicyService.h
+++ b/services/audiopolicy/service/AudioPolicyService.h
@@ -452,8 +452,8 @@
     app_state_t apmStatFromAmState(int amState);
 
     bool isSupportedSystemUsage(audio_usage_t usage);
-    status_t validateUsage(const audio_attributes_t& attr);
-    status_t validateUsage(const audio_attributes_t& attr,
+    binder::Status validateUsage(const audio_attributes_t& attr);
+    binder::Status validateUsage(const audio_attributes_t& attr,
                            const AttributionSourceState& attributionSource);
 
     void updateUidStates();
@@ -790,7 +790,8 @@
                                     audio_config_base_t *mixerConfig,
                                     const sp<DeviceDescriptorBase>& device,
                                     uint32_t *latencyMs,
-                                    audio_output_flags_t flags);
+                                    audio_output_flags_t flags,
+                                    audio_attributes_t attributes);
         // creates a special output that is duplicated to the two outputs passed as arguments. The duplication is performed by
         // a special mixer thread in the AudioFlinger.
         virtual audio_io_handle_t openDuplicateOutput(audio_io_handle_t output1, audio_io_handle_t output2);
@@ -976,13 +977,15 @@
                       const audio_io_handle_t io, AttributionSourceState attributionSource,
                             const audio_session_t session, audio_port_handle_t portId,
                             audio_port_handle_t deviceId, audio_stream_type_t stream,
-                            bool isSpatialized) :
+                            bool isSpatialized, audio_channel_mask_t channelMask) :
                     AudioClient(attributes, io, attributionSource, session, portId,
-                        deviceId), stream(stream), isSpatialized(isSpatialized)  {}
+                        deviceId), stream(stream), isSpatialized(isSpatialized),
+                        channelMask(channelMask) {}
                 ~AudioPlaybackClient() override = default;
 
         const audio_stream_type_t stream;
         const bool isSpatialized;
+        const audio_channel_mask_t channelMask;
     };
 
     void getPlaybackClientAndEffects(audio_port_handle_t portId,
@@ -1013,14 +1016,14 @@
     void unloadAudioPolicyManager();
 
     /**
-     * Returns the number of active audio tracks on the specified output mixer.
+     * Returns the channel masks for active audio tracks on the specified output mixer.
      * The query can be specified to only include spatialized audio tracks or consider
      * all tracks.
      * @param output the I/O handle of the output mixer to consider
      * @param spatializedOnly true if only spatialized tracks should be considered
-     * @return the number of active tracks.
+     * @return a list of channel masks for all active tracks matching the condition.
      */
-    size_t countActiveClientsOnOutput_l(
+    std::vector<audio_channel_mask_t> getActiveTracksMasks_l(
             audio_io_handle_t output, bool spatializedOnly = true) REQUIRES(mMutex);
 
     mutable audio_utils::mutex mMutex{audio_utils::MutexOrder::kAudioPolicyService_Mutex};
diff --git a/services/audiopolicy/service/Spatializer.cpp b/services/audiopolicy/service/Spatializer.cpp
index c98f8df..9cc3b8f 100644
--- a/services/audiopolicy/service/Spatializer.cpp
+++ b/services/audiopolicy/service/Spatializer.cpp
@@ -936,7 +936,8 @@
             });
 }
 
-status_t Spatializer::attachOutput(audio_io_handle_t output, size_t numActiveTracks) {
+status_t Spatializer::attachOutput(audio_io_handle_t output,
+          const std::vector<audio_channel_mask_t>& activeTracksMasks) {
     bool outputChanged = false;
     sp<media::INativeSpatializerCallback> callback;
 
@@ -944,7 +945,7 @@
         audio_utils::lock_guard lock(mMutex);
         ALOGV("%s output %d mOutput %d", __func__, (int)output, (int)mOutput);
         mLocalLog.log("%s with output %d tracks %zu (mOutput %d)", __func__, (int)output,
-                      numActiveTracks, (int)mOutput);
+                      activeTracksMasks.size(), (int)mOutput);
         if (mOutput != AUDIO_IO_HANDLE_NONE) {
             LOG_ALWAYS_FATAL_IF(mEngine == nullptr, "%s output set without FX engine", __func__);
             // remove FX instance
@@ -969,7 +970,7 @@
 
         outputChanged = mOutput != output;
         mOutput = output;
-        mNumActiveTracks = numActiveTracks;
+        mActiveTracksMasks = activeTracksMasks;
         AudioSystem::addSupportedLatencyModesCallback(this);
 
         std::vector<audio_latency_mode_t> latencyModes;
@@ -1008,7 +1009,8 @@
 
     {
         audio_utils::lock_guard lock(mMutex);
-        mLocalLog.log("%s with output %d tracks %zu", __func__, (int)mOutput, mNumActiveTracks);
+        mLocalLog.log("%s with output %d num tracks %zu",
+            __func__, (int)mOutput, mActiveTracksMasks.size());
         ALOGV("%s mOutput %d", __func__, (int)mOutput);
         if (mOutput == AUDIO_IO_HANDLE_NONE) {
             return output;
@@ -1051,11 +1053,13 @@
     }
 }
 
-void Spatializer::updateActiveTracks(size_t numActiveTracks) {
+void Spatializer::updateActiveTracks(
+        const std::vector<audio_channel_mask_t>& activeTracksMasks) {
     audio_utils::lock_guard lock(mMutex);
-    if (mNumActiveTracks != numActiveTracks) {
-        mLocalLog.log("%s from %zu to %zu", __func__, mNumActiveTracks, numActiveTracks);
-        mNumActiveTracks = numActiveTracks;
+    if (mActiveTracksMasks != activeTracksMasks) {
+        mLocalLog.log("%s from %zu to %zu",
+                __func__, mActiveTracksMasks.size(), activeTracksMasks.size());
+        mActiveTracksMasks = activeTracksMasks;
         checkEngineState_l();
         checkSensorsState_l();
     }
@@ -1114,7 +1118,7 @@
         if (mPoseController != nullptr) {
             // TODO(b/253297301, b/255433067) reenable low latency condition check
             // for Head Tracking after Bluetooth HAL supports it correctly.
-            if (mNumActiveTracks > 0 && mLevel != Spatialization::Level::NONE
+            if (shouldUseHeadTracking_l() && mLevel != Spatialization::Level::NONE
                     && mDesiredHeadTrackingMode != HeadTrackingMode::STATIC
                     && mHeadSensor != SpatializerPoseController::INVALID_SENSOR) {
                 if (supportsLowLatencyMode) {
@@ -1146,9 +1150,28 @@
     }
 }
 
+
+/* static */
+bool Spatializer::containsImmersiveChannelMask(
+        const std::vector<audio_channel_mask_t>& masks)
+{
+    for (auto mask : masks) {
+        if (audio_is_channel_mask_spatialized(mask)) {
+            return true;
+        }
+    }
+    // Only non-immersive channel masks, e.g. AUDIO_CHANNEL_OUT_STEREO, are present.
+    return false;
+}
+
+bool Spatializer::shouldUseHeadTracking_l() const {
+    // Head tracking is only available when immersive channel masks are active.
+    return containsImmersiveChannelMask(mActiveTracksMasks);
+}
+
 void Spatializer::checkEngineState_l() {
     if (mEngine != nullptr) {
-        if (mLevel != Spatialization::Level::NONE && mNumActiveTracks > 0) {
+        if (mLevel != Spatialization::Level::NONE && mActiveTracksMasks.size() > 0) {
             mEngine->setEnabled(true);
             setEffectParameter_l(SPATIALIZER_PARAM_LEVEL,
                     std::vector<Spatialization::Level>{mLevel});
@@ -1237,7 +1260,8 @@
     base::StringAppendF(&ss, "\n%smSupportsHeadTracking: %s\n", prefixSpace.c_str(),
                         mSupportsHeadTracking ? "true" : "false");
     // 2. Settings (Output, tracks)
-    base::StringAppendF(&ss, "%smNumActiveTracks: %zu\n", prefixSpace.c_str(), mNumActiveTracks);
+    base::StringAppendF(&ss, "%sNum Active Tracks: %zu\n",
+            prefixSpace.c_str(), mActiveTracksMasks.size());
     base::StringAppendF(&ss, "%sOutputStreamHandle: %d\n", prefixSpace.c_str(), (int)mOutput);
 
     // 3. Sensors, Effect information.
@@ -1248,8 +1272,12 @@
                         mDisplayOrientation);
 
     // 4. Show flag or property state.
+    static const bool stereo_spatialization_prop_enabled =
+            property_get_bool("ro.audio.stereo_spatialization_enabled", false);
+    const bool stereo_spatialization = com_android_media_audio_stereo_spatialization()
+            && stereo_spatialization_prop_enabled;
     base::StringAppendF(&ss, "%sStereo Spatialization: %s\n", prefixSpace.c_str(),
-            com_android_media_audio_stereo_spatialization() ? "true" : "false");
+            stereo_spatialization ? "true" : "false");
 
     ss.append(prefixSpace + "CommandLog:\n");
     ss += mLocalLog.dumpToString((prefixSpace + " ").c_str(), mMaxLocalLogLine);
diff --git a/services/audiopolicy/service/Spatializer.h b/services/audiopolicy/service/Spatializer.h
index c5f159c..5ea3258 100644
--- a/services/audiopolicy/service/Spatializer.h
+++ b/services/audiopolicy/service/Spatializer.h
@@ -185,7 +185,8 @@
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is opened and the spatializer engine must be created.
      */
-    status_t attachOutput(audio_io_handle_t output, size_t numActiveTracks);
+    status_t attachOutput(audio_io_handle_t output,
+                          const std::vector<audio_channel_mask_t>& activeTracksMasks);
     /** Called by audio policy service when the special output mixer dedicated to spatialization
      * is closed and the spatializer engine must be released.
      */
@@ -199,7 +200,7 @@
         mOutput = output;
     }
 
-    void updateActiveTracks(size_t numActiveTracks);
+    void updateActiveTracks(const std::vector<audio_channel_mask_t>& activeTracksMasks);
 
     /** Gets the channel mask, sampling rate and format set for the spatializer input. */
     audio_config_base_t getAudioInConfig() const;
@@ -227,6 +228,16 @@
     void onSupportedLatencyModesChangedMsg(
             audio_io_handle_t output, std::vector<audio_latency_mode_t>&& modes);
 
+    // Made public for testing only.
+    /**
+     * Returns true if the vector contains at least one immersive channel mask.
+     *
+     * For example, AUDIO_CHANNEL_OUT_STEREO is a non-immersive channel mask,
+     * whereas AUDIO_CHANNEL_OUT_5POINT1 is an immersive one.
+     */
+    static bool containsImmersiveChannelMask(
+            const std::vector<audio_channel_mask_t>& masks);
+
 private:
     Spatializer(effect_descriptor_t engineDescriptor,
                      SpatializerPolicyCallback *callback);
@@ -462,6 +473,11 @@
      */
     audio_latency_mode_t selectHeadtrackingConnectionMode_l() REQUIRES(mMutex);
 
+    /**
+     * Indicates whether the current conditions are compatible with head tracking,
+     * i.e. whether at least one active track has an immersive channel mask.
+     */
+    bool shouldUseHeadTracking_l() const REQUIRES(mMutex);
+
     /** Effect engine descriptor */
     const effect_descriptor_t mEngineDescriptor;
     /** Callback interface to parent audio policy service */
@@ -539,7 +555,7 @@
     sp<ALooper> mLooper;
     sp<EngineCallbackHandler> mHandler;
 
-    size_t mNumActiveTracks GUARDED_BY(mMutex) = 0;
+    std::vector<audio_channel_mask_t> mActiveTracksMasks GUARDED_BY(mMutex);
     std::vector<audio_latency_mode_t> mSupportedLatencyModes GUARDED_BY(mMutex);
     /** preference order for low latency modes according to persist.bluetooth.hid.transport */
     std::vector<audio_latency_mode_t> mOrderedLowLatencyModes;
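
A minimal sketch of how the policy-service side is expected to drive these new signatures, based only on the declarations above: the per-track channel masks returned by getActiveTracksMasks_l() are handed to the Spatializer, which then derives engine state and head-tracking eligibility from the mask contents rather than from a bare track count. The helper name updateSpatializerTracks_l and the mSpatializer member are illustrative assumptions, not part of this change.

    // Sketch only: "output" is assumed to be the I/O handle of the spatializer mixer,
    // and mSpatializer the Spatializer instance owned by the audio policy service.
    void updateSpatializerTracks_l(audio_io_handle_t output) REQUIRES(mMutex) {
        const std::vector<audio_channel_mask_t> masks =
                getActiveTracksMasks_l(output, /*spatializedOnly=*/ true);
        // A non-empty, all-stereo list keeps the spatializer engine enabled while head
        // tracking stays off: shouldUseHeadTracking_l() requires at least one immersive
        // mask such as AUDIO_CHANNEL_OUT_5POINT1.
        mSpatializer->updateActiveTracks(masks);
    }
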
diff --git a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
index ba0a1eb..45643f7 100644
--- a/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyManagerTestClient.h
@@ -41,7 +41,8 @@
                         audio_config_base_t * /*mixerConfig*/,
                         const sp<DeviceDescriptorBase>& /*device*/,
                         uint32_t * /*latencyMs*/,
-                        audio_output_flags_t /*flags*/) override {
+                        audio_output_flags_t /*flags*/,
+                        audio_attributes_t /*attributes*/) override {
         if (module >= mNextModuleHandle) {
             ALOGE("%s: Module handle %d has not been allocated yet (next is %d)",
                   __func__, module, mNextModuleHandle);
@@ -72,6 +73,7 @@
         *input = mNextIoHandle++;
         mOpenedInputs.insert(*input);
         ALOGD("%s: opened input %d", __func__, *input);
+        mOpenInputCallsCount++;
         return NO_ERROR;
     }
 
@@ -86,6 +88,7 @@
             return BAD_VALUE;
         }
         ALOGD("%s: closed input %d", __func__, input);
+        mCloseInputCallsCount++;
         return NO_ERROR;
     }
 
@@ -260,6 +263,18 @@
         auto it = mTracksInternalMute.find(portId);
         return it == mTracksInternalMute.end() ? false : it->second;
     }
+    void resetInputApiCallsCounters() {
+        mOpenInputCallsCount = 0;
+        mCloseInputCallsCount = 0;
+    }
+
+    size_t getCloseInputCallsCount() const {
+        return mCloseInputCallsCount;
+    }
+
+    size_t getOpenInputCallsCount() const {
+        return mOpenInputCallsCount;
+    }
 
 private:
     audio_module_handle_t mNextModuleHandle = AUDIO_MODULE_HANDLE_NONE + 1;
@@ -275,6 +290,8 @@
     std::set<audio_channel_mask_t> mSupportedChannelMasks;
     std::map<audio_port_handle_t, bool> mTracksInternalMute;
     std::set<audio_io_handle_t> mOpenedInputs;
+    size_t mOpenInputCallsCount = 0;
+    size_t mCloseInputCallsCount = 0;
 };
 
 } // namespace android
diff --git a/services/audiopolicy/tests/AudioPolicyTestClient.h b/services/audiopolicy/tests/AudioPolicyTestClient.h
index c15adcb..0299160 100644
--- a/services/audiopolicy/tests/AudioPolicyTestClient.h
+++ b/services/audiopolicy/tests/AudioPolicyTestClient.h
@@ -37,7 +37,8 @@
                         audio_config_base_t* /*mixerConfig*/,
                         const sp<DeviceDescriptorBase>& /*device*/,
                         uint32_t* /*latencyMs*/,
-                        audio_output_flags_t /*flags*/) override { return NO_INIT; }
+                        audio_output_flags_t /*flags*/,
+                        audio_attributes_t /*attributes*/) override { return NO_INIT; }
     audio_io_handle_t openDuplicateOutput(audio_io_handle_t /*output1*/,
                                           audio_io_handle_t /*output2*/) override {
         return AUDIO_IO_HANDLE_NONE;
diff --git a/services/audiopolicy/tests/audiopolicymanager_tests.cpp b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
index 4c98687..f66b911 100644
--- a/services/audiopolicy/tests/audiopolicymanager_tests.cpp
+++ b/services/audiopolicy/tests/audiopolicymanager_tests.cpp
@@ -2508,10 +2508,12 @@
                         audio_config_base_t * mixerConfig,
                         const sp<DeviceDescriptorBase>& device,
                         uint32_t * latencyMs,
-                        audio_output_flags_t flags) override {
+                        audio_output_flags_t flags,
+                        audio_attributes_t attributes) override {
         return mSimulateFailure ? BAD_VALUE :
                 AudioPolicyManagerTestClient::openOutput(
-                        module, output, halConfig, mixerConfig, device, latencyMs, flags);
+                        module, output, halConfig, mixerConfig, device, latencyMs, flags,
+                        attributes);
     }
 
     status_t openInput(audio_module_handle_t module,
@@ -3830,3 +3832,92 @@
         testing::Values(AUDIO_USAGE_NOTIFICATION_TELEPHONY_RINGTONE,
                         AUDIO_USAGE_ALARM)
 );
+
+class AudioPolicyManagerInputPreemptionTest : public AudioPolicyManagerTestWithConfigurationFile {
+};
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        SameSessionReusesInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_IO_HANDLE_NONE;
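+    // A second request with the same attributes and the same session is expected to
+    // reuse the already-open input rather than open a new one.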
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(0, mClient->getCloseInputCallsCount());
+    EXPECT_EQ(input1, input2);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        LesserPriorityReusesInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_IO_HANDLE_NONE;
+    attr.source = AUDIO_SOURCE_VOICE_RECOGNITION;
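+    // A lower-priority request from another session is expected to share the already
+    // open input: no additional openInput()/closeInput() calls and the same handle.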
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, OTHER_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(0, mClient->getCloseInputCallsCount());
+    EXPECT_EQ(input1, input2);
+}
+
+TEST_F_WITH_FLAGS(
+        AudioPolicyManagerInputPreemptionTest,
+        HigherPriorityPreemptsInput,
+        REQUIRES_FLAGS_ENABLED(
+                ACONFIG_FLAG(com::android::media::audioserver, fix_input_sharing_logic))
+) {
+    mClient->resetInputApiCallsCounters();
+
+    audio_attributes_t attr = AUDIO_ATTRIBUTES_INITIALIZER;
+    attr.source = AUDIO_SOURCE_MIC;
+    audio_port_handle_t selectedDeviceId = AUDIO_PORT_HANDLE_NONE;
+    audio_io_handle_t input1 = AUDIO_IO_HANDLE_NONE;
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input1, TEST_SESSION_ID, 1, &selectedDeviceId,
+                                            AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                            48000));
+
+    EXPECT_EQ(1, mClient->getOpenInputCallsCount());
+
+    audio_io_handle_t input2 = AUDIO_IO_HANDLE_NONE;
+    attr.source = AUDIO_SOURCE_CAMCORDER;
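+    // A higher-priority request from another session is expected to preempt the first
+    // client on this input (maxOpenCount="1" in the test config): the existing input is
+    // closed and a new one opened, as checked by the counters below.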
+    ASSERT_NO_FATAL_FAILURE(getInputForAttr(attr, &input2, OTHER_SESSION_ID, 1, &selectedDeviceId,
+                                        AUDIO_FORMAT_PCM_16_BIT, AUDIO_CHANNEL_IN_STEREO,
+                                        48000));
+
+    EXPECT_EQ(2, mClient->getOpenInputCallsCount());
+    EXPECT_EQ(1, mClient->getCloseInputCallsCount());
+    EXPECT_NE(input1, input2);
+}
diff --git a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
index bbc19fa..67e99f2 100644
--- a/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
+++ b/services/audiopolicy/tests/resources/test_audio_policy_configuration.xml
@@ -30,7 +30,7 @@
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                              samplingRates="48000" channelMasks="AUDIO_CHANNEL_OUT_STEREO"/>
                 </mixPort>
-                <mixPort name="primary input" role="sink">
+                <mixPort name="primary input" role="sink" maxActiveCount="1" maxOpenCount="1">
                     <profile name="" format="AUDIO_FORMAT_PCM_16_BIT"
                              samplingRates="48000"
                              channelMasks="AUDIO_CHANNEL_IN_STEREO"/>
diff --git a/services/audiopolicy/tests/spatializer_tests.cpp b/services/audiopolicy/tests/spatializer_tests.cpp
index 73bef43..0b40f32 100644
--- a/services/audiopolicy/tests/spatializer_tests.cpp
+++ b/services/audiopolicy/tests/spatializer_tests.cpp
@@ -33,6 +33,38 @@
 using media::audio::common::HeadTracking;
 using media::audio::common::Spatialization;
 
+// Test Spatializer Helper Methods
+
+TEST(Spatializer, containsImmersiveChannelMask) {
+    // Regardless of the implementation, we expect the following
+    // behavior.
+
+    // Pure non-immersive
+    EXPECT_FALSE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_STEREO }));
+    EXPECT_FALSE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO }));
+    EXPECT_FALSE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
+              AUDIO_CHANNEL_OUT_STEREO, AUDIO_CHANNEL_OUT_MONO }));
+
+    // Pure immersive
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_5POINT1 }));
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_7POINT1 }));
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_5POINT1, AUDIO_CHANNEL_OUT_7POINT1,
+              AUDIO_CHANNEL_OUT_22POINT2 }));
+
+    // Mixed immersive/non-immersive
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_7POINT1POINT4 }));
+    EXPECT_TRUE(Spatializer::containsImmersiveChannelMask(
+            { AUDIO_CHANNEL_OUT_MONO, AUDIO_CHANNEL_OUT_STEREO,
+              AUDIO_CHANNEL_OUT_7POINT1 }));
+}
+
 class TestSpatializerPolicyCallback :
         public SpatializerPolicyCallback {
 public:
@@ -68,7 +100,7 @@
         mSpatializer->setOutput(AUDIO_IO_HANDLE_NONE);
         mSpatializer->setDesiredHeadTrackingMode(HeadTracking::Mode::DISABLED);
         mSpatializer->setHeadSensor(SpatializerPoseController::INVALID_SENSOR);
-        mSpatializer->updateActiveTracks(0);
+        mSpatializer->updateActiveTracks({});
     }
 
     static constexpr audio_io_handle_t sTestOutput = 1977;
@@ -174,12 +206,12 @@
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
 
     // requested latency mode must be low if at least one spatialized track is active
-    mSpatializer->updateActiveTracks(1);
+    mSpatializer->updateActiveTracks({AUDIO_CHANNEL_OUT_5POINT1});
     requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_LOW);
 
     // requested latency mode must be free after stopping the last spatialized track
-    mSpatializer->updateActiveTracks(0);
+    mSpatializer->updateActiveTracks({});
     requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
 }
@@ -202,7 +234,7 @@
 
     // requested latency mode must be low software if at least one spatialized track is active
     // and the only supported low latency mode is low software
-    mSpatializer->updateActiveTracks(1);
+    mSpatializer->updateActiveTracks({AUDIO_CHANNEL_OUT_5POINT1});
     requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_DYNAMIC_SPATIAL_AUDIO_SOFTWARE);
 
@@ -225,7 +257,7 @@
     }
 
     // requested latency mode must be free after stopping the last spatialized track
-    mSpatializer->updateActiveTracks(0);
+    mSpatializer->updateActiveTracks({});
     requestedLatencyMode = mSpatializer->getRequestedLatencyMode();
     ASSERT_EQ(requestedLatencyMode, AUDIO_LATENCY_MODE_FREE);
 }
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index d9d8a3d..d21241b 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -22,13 +22,9 @@
 #include <utils/Trace.h>
 #include <cutils/properties.h>
 
-#include "camera/CameraMetadata.h"
 #include "CameraFlashlight.h"
-#include "gui/IGraphicBufferConsumer.h"
-#include "gui/BufferQueue.h"
+#include "camera/CameraMetadata.h"
 #include "camera/camera2/CaptureRequest.h"
-#include "device3/Camera3Device.h"
-
 
 namespace android {
 
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 516e8f0..f94300e 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -784,12 +784,13 @@
     return true;
 }
 
-Status CameraService::getNumberOfCameras(int32_t type, int32_t deviceId, int32_t devicePolicy,
+Status CameraService::getNumberOfCameras(int32_t type,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         int32_t* numCameras) {
     ATRACE_CALL();
-    if (vd_flags::camera_device_awareness() && (deviceId != kDefaultDeviceId)
+    if (vd_flags::camera_device_awareness() && (clientAttribution.deviceId != kDefaultDeviceId)
             && (devicePolicy != IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT)) {
-        *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(deviceId);
+        *numCameras = mVirtualDeviceCameraIdMapper.getNumberOfCameras(clientAttribution.deviceId);
         return Status::ok();
     }
 
@@ -822,7 +823,7 @@
 }
 
 Status CameraService::createDefaultRequest(const std::string& unresolvedCameraId, int templateId,
-        int32_t deviceId, int32_t devicePolicy,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /* out */
         hardware::camera2::impl::CameraMetadataNative* request) {
     ATRACE_CALL();
@@ -837,11 +838,11 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -886,7 +887,7 @@
 Status CameraService::isSessionConfigurationWithParametersSupported(
         const std::string& unresolvedCameraId, int targetSdkVersion,
         const SessionConfiguration& sessionConfiguration,
-        int32_t deviceId, int32_t devicePolicy,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/ bool* supported) {
     ATRACE_CALL();
 
@@ -900,11 +901,11 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -983,7 +984,8 @@
 
 Status CameraService::getSessionCharacteristics(const std::string& unresolvedCameraId,
         int targetSdkVersion, int rotationOverride,
-        const SessionConfiguration& sessionConfiguration, int32_t deviceId, int32_t devicePolicy,
+        const SessionConfiguration& sessionConfiguration,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/ CameraMetadata* outMetadata) {
     ATRACE_CALL();
 
@@ -1000,11 +1002,11 @@
         return STATUS_ERROR(ERROR_DISCONNECTED, "Camera subsystem is not available");
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-                                                                  devicePolicy);
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                                       unresolvedCameraId.c_str(), deviceId);
+                                       unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1206,14 +1208,16 @@
     return mVirtualDeviceCameraIdMapper.getActualCameraId(deviceId, inputCameraId);
 }
 
-Status CameraService::getCameraInfo(int cameraId,  int rotationOverride, int32_t deviceId,
-        int32_t devicePolicy, CameraInfo* cameraInfo) {
+Status CameraService::getCameraInfo(int cameraId, int rotationOverride,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy,
+        CameraInfo* cameraInfo) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
-    std::string cameraIdStr = cameraIdIntToStrLocked(cameraId, deviceId, devicePolicy);
+    std::string cameraIdStr =
+            cameraIdIntToStrLocked(cameraId, clientAttribution.deviceId, devicePolicy);
     if (cameraIdStr.empty()) {
         std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
-                cameraId, deviceId);
+                cameraId, clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1287,8 +1291,8 @@
 }
 
 Status CameraService::getCameraCharacteristics(const std::string& unresolvedCameraId,
-        int targetSdkVersion, int rotationOverride, int32_t deviceId, int32_t devicePolicy,
-        CameraMetadata* cameraInfo) {
+        int targetSdkVersion, int rotationOverride, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy, CameraMetadata* cameraInfo) {
     ATRACE_CALL();
 
     if (!cameraInfo) {
@@ -1303,11 +1307,11 @@
                 "Camera subsystem is not available");;
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional =
+            resolveCameraId(unresolvedCameraId, clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1340,16 +1344,17 @@
     return filterSensitiveMetadataIfNeeded(cameraId, cameraInfo);
 }
 
-Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId, int32_t deviceId,
+Status CameraService::getTorchStrengthLevel(const std::string& unresolvedCameraId,
+        const AttributionSourceState& clientAttribution,
         int32_t devicePolicy, int32_t* torchStrength) {
     ATRACE_CALL();
     Mutex::Autolock l(mServiceLock);
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -1606,13 +1611,16 @@
     std::string cameraIdStr = std::to_string(cameraId);
     Status ret = Status::ok();
     sp<Client> tmp = nullptr;
+
+    logConnectionAttempt(getCallingPid(), kServiceName, cameraIdStr, API_1);
+
     if (!(ret = connectHelper<ICameraClient,Client>(
             sp<ICameraClient>{nullptr}, cameraIdStr, cameraId,
             kServiceName, /*systemNativeClient*/ false, {}, uid, USE_CALLING_PID,
             API_1, /*shimUpdateOnly*/ true, /*oomScoreOffset*/ 0,
             /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
             /*rotationOverride*/hardware::ICameraService::ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
-            /*forceSlowJpegMode*/false, cameraIdStr, /*out*/ tmp)
+            /*forceSlowJpegMode*/false, cameraIdStr, /*isNonSystemNdk*/ false, /*out*/ tmp)
             ).isOk()) {
         ALOGE("%s: Error initializing shim metadata: %s", __FUNCTION__, ret.toString8().c_str());
     }
@@ -1682,17 +1690,14 @@
 }
 
 Status CameraService::validateConnectLocked(const std::string& cameraId,
-        const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid,
-        /*out*/int& originalClientPid) const {
+        const std::string& clientName8, /*inout*/int& clientUid, /*inout*/int& clientPid) const {
 
 #ifdef __BRILLO__
     UNUSED(clientName8);
     UNUSED(clientUid);
     UNUSED(clientPid);
-    UNUSED(originalClientPid);
 #else
-    Status allowed = validateClientPermissionsLocked(cameraId, clientName8, clientUid, clientPid,
-            originalClientPid);
+    Status allowed = validateClientPermissionsLocked(cameraId, clientName8, clientUid, clientPid);
     if (!allowed.isOk()) {
         return allowed;
     }
@@ -1730,8 +1735,7 @@
 }
 
 Status CameraService::validateClientPermissionsLocked(const std::string& cameraId,
-        const std::string& clientName, int& clientUid, int& clientPid,
-        /*out*/int& originalClientPid) const {
+        const std::string& clientName, int& clientUid, int& clientPid) const {
     int callingPid = getCallingPid();
     int callingUid = getCallingUid();
 
@@ -1814,12 +1818,10 @@
                 "is enabled", clientName.c_str(), clientPid, clientUid, cameraId.c_str());
     }
 
+    userid_t clientUserId = multiuser_get_user_id(clientUid);
+
     // Only use passed in clientPid to check permission. Use calling PID as the client PID that's
     // connected to camera service directly.
-    originalClientPid = clientPid;
-    clientPid = callingPid;
-
-    userid_t clientUserId = multiuser_get_user_id(clientUid);
 
     // For non-system clients : Only allow clients who are being used by the current foreground
     // device user, unless calling from our own process.
@@ -1840,11 +1842,11 @@
         if (isHeadlessSystemUserMode()
                 && (clientUserId == USER_SYSTEM)
                 && !hasPermissionsForCameraHeadlessSystemUser(cameraId, callingPid, callingUid)) {
-            ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", clientPid, clientUid);
+            ALOGE("Permission Denial: can't use the camera pid=%d, uid=%d", callingPid, clientUid);
             return STATUS_ERROR_FMT(ERROR_PERMISSION_DENIED,
                     "Caller \"%s\" (PID %d, UID %d) cannot open camera \"%s\" as Headless System \
                     User without camera headless system user permission",
-                    clientName.c_str(), clientPid, clientUid, cameraId.c_str());
+                    clientName.c_str(), callingPid, clientUid, cameraId.c_str());
         }
     }
 
@@ -2129,35 +2131,41 @@
 Status CameraService::connect(
         const sp<ICameraClient>& cameraClient,
         int api1CameraId,
-        const std::string& clientPackageName,
-        int clientUid,
-        int clientPid,
         int targetSdkVersion,
         int rotationOverride,
         bool forceSlowJpegMode,
-        int32_t deviceId,
+        const AttributionSourceState& clientAttribution,
         int32_t devicePolicy,
         /*out*/
         sp<ICamera>* device) {
     ATRACE_CALL();
     Status ret = Status::ok();
 
-    std::string cameraIdStr = cameraIdIntToStr(api1CameraId, deviceId, devicePolicy);
+    std::string cameraIdStr =
+            cameraIdIntToStr(api1CameraId, clientAttribution.deviceId, devicePolicy);
     if (cameraIdStr.empty()) {
         std::string msg = fmt::sprintf("Camera %d: Invalid camera id for device id %d",
-                api1CameraId, deviceId);
+                api1CameraId, clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
+    std::string clientPackageNameMaybe = clientAttribution.packageName.value_or("");
+    bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
+    std::string clientPackageName = resolvePackageName(clientAttribution.uid,
+            clientPackageNameMaybe);
+    logConnectionAttempt(clientAttribution.pid, clientPackageName, cameraIdStr, API_1);
+
     sp<Client> client = nullptr;
     ret = connectHelper<ICameraClient,Client>(cameraClient, cameraIdStr, api1CameraId,
-            clientPackageName, /*systemNativeClient*/ false, {}, clientUid, clientPid, API_1,
+            clientPackageName, /*systemNativeClient*/ false, {},
+            clientAttribution.uid, clientAttribution.pid, API_1,
             /*shimUpdateOnly*/ false, /*oomScoreOffset*/ 0, targetSdkVersion,
-            rotationOverride, forceSlowJpegMode, cameraIdStr, /*out*/client);
+            rotationOverride, forceSlowJpegMode, cameraIdStr, isNonSystemNdk, /*out*/client);
 
     if (!ret.isOk()) {
-        logRejected(cameraIdStr, getCallingPid(), clientPackageName, toStdString(ret.toString8()));
+        logRejected(cameraIdStr, getCallingPid(), clientAttribution.packageName.value_or(""),
+                toStdString(ret.toString8()));
         return ret;
     }
 
@@ -2234,47 +2242,50 @@
 Status CameraService::connectDevice(
         const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
         const std::string& unresolvedCameraId,
-        const std::string& clientPackageName,
-        const std::optional<std::string>& clientFeatureId,
-        int clientUid, int oomScoreOffset, int targetSdkVersion,
-        int rotationOverride, int32_t deviceId, int32_t devicePolicy,
+        int oomScoreOffset, int targetSdkVersion,
+        int rotationOverride, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/
         sp<hardware::camera2::ICameraDeviceUser>* device) {
     ATRACE_CALL();
     RunThreadWithRealtimePriority priorityBump;
     Status ret = Status::ok();
     sp<CameraDeviceClient> client = nullptr;
-    std::string clientPackageNameAdj = clientPackageName;
+    std::string clientPackageNameMaybe = clientAttribution.packageName.value_or("");
     int callingPid = getCallingPid();
     int callingUid = getCallingUid();
     bool systemNativeClient = false;
-    if (callerHasSystemUid() && (clientPackageNameAdj.size() == 0)) {
+    if (callerHasSystemUid() && (clientPackageNameMaybe.size() == 0)) {
         std::string systemClient = fmt::sprintf("client.pid<%d>", callingPid);
-        clientPackageNameAdj = systemClient;
+        clientPackageNameMaybe = systemClient;
         systemNativeClient = true;
     }
 
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
     std::string cameraId = cameraIdOptional.value();
 
+    bool isNonSystemNdk = clientPackageNameMaybe.size() == 0;
+    std::string clientPackageName = resolvePackageName(clientAttribution.uid,
+            clientPackageNameMaybe);
+    logConnectionAttempt(clientAttribution.pid, clientPackageName, cameraId, API_2);
+
     if (oomScoreOffset < 0) {
         std::string msg =
                 fmt::sprintf("Cannot increase the priority of a client %s pid %d for "
-                        "camera id %s", clientPackageNameAdj.c_str(), callingPid,
+                        "camera id %s", clientPackageName.c_str(), callingPid,
                         cameraId.c_str());
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
 
-    userid_t clientUserId = multiuser_get_user_id(clientUid);
-    if (clientUid == USE_CALLING_UID) {
+    userid_t clientUserId = multiuser_get_user_id(clientAttribution.uid);
+    if (clientAttribution.uid == USE_CALLING_UID) {
         clientUserId = multiuser_get_user_id(callingUid);
     }
 
@@ -2294,19 +2305,19 @@
             && !isTrustedCallingUid(callingUid)) {
         std::string msg = fmt::sprintf("Cannot change the priority of a client %s pid %d for "
                         "camera id %s without SYSTEM_CAMERA permissions",
-                        clientPackageNameAdj.c_str(), callingPid, cameraId.c_str());
+                        clientPackageName.c_str(), callingPid, cameraId.c_str());
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(ERROR_PERMISSION_DENIED, msg.c_str());
     }
 
     ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb,
-            cameraId, /*api1CameraId*/-1, clientPackageNameAdj, systemNativeClient, clientFeatureId,
-            clientUid, USE_CALLING_PID, API_2, /*shimUpdateOnly*/ false, oomScoreOffset,
-            targetSdkVersion, rotationOverride, /*forceSlowJpegMode*/false, unresolvedCameraId,
-            /*out*/client);
+            cameraId, /*api1CameraId*/-1, clientPackageName, systemNativeClient,
+            clientAttribution.attributionTag, clientAttribution.uid, USE_CALLING_PID, API_2,
+            /*shimUpdateOnly*/ false, oomScoreOffset, targetSdkVersion, rotationOverride,
+            /*forceSlowJpegMode*/false, unresolvedCameraId, isNonSystemNdk, /*out*/client);
 
     if (!ret.isOk()) {
-        logRejected(cameraId, callingPid, clientPackageNameAdj, toStdString(ret.toString8()));
+        logRejected(cameraId, callingPid, clientPackageName, toStdString(ret.toString8()));
         return ret;
     }
 
@@ -2366,7 +2377,7 @@
     return false;
 }
 
-std::string CameraService::getPackageNameFromUid(int clientUid) {
+std::string CameraService::getPackageNameFromUid(int clientUid) const {
     std::string packageName("");
 
     sp<IPermissionController> permCtrl;
@@ -2411,38 +2422,44 @@
     return packageName;
 }
 
-template<class CALLBACK, class CLIENT>
-Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
-        int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
-        const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
-        apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
-        int rotationOverride, bool forceSlowJpegMode,
-        const std::string& originalCameraId, /*out*/sp<CLIENT>& device) {
-    binder::Status ret = binder::Status::ok();
-
-    bool isNonSystemNdk = false;
-    std::string clientPackageName;
-    int packageUid = (clientUid == USE_CALLING_UID) ?
-            getCallingUid() : clientUid;
-    if (clientPackageNameMaybe.size() <= 0) {
-        // NDK calls don't come with package names, but we need one for various cases.
-        // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
-        // do exist. For all authentication cases, all packages under the same UID get the
-        // same permissions, so picking any associated package name is sufficient. For some
-        // other cases, this may give inaccurate names for clients in logs.
-        isNonSystemNdk = true;
-        clientPackageName = getPackageNameFromUid(packageUid);
-    } else {
-        clientPackageName = clientPackageNameMaybe;
-    }
-
-    int originalClientPid = 0;
-
+void CameraService::logConnectionAttempt(int clientPid, const std::string& clientPackageName,
+        const std::string& cameraId, apiLevel effectiveApiLevel) const {
     int packagePid = (clientPid == USE_CALLING_PID) ?
         getCallingPid() : clientPid;
     ALOGI("CameraService::connect call (PID %d \"%s\", camera ID %s) and "
             "Camera API version %d", packagePid, clientPackageName.c_str(), cameraId.c_str(),
             static_cast<int>(effectiveApiLevel));
+}
+
+std::string CameraService::resolvePackageName(int clientUid,
+        const std::string& clientPackageNameMaybe) const {
+    if (clientPackageNameMaybe.size() <= 0) {
+        int packageUid = (clientUid == USE_CALLING_UID) ?
+                getCallingUid() : clientUid;
+        // NDK calls don't come with package names, but we need one for various cases.
+        // Generally, there's a 1:1 mapping between UID and package name, but shared UIDs
+        // do exist. For all authentication cases, all packages under the same UID get the
+        // same permissions, so picking any associated package name is sufficient. For some
+        // other cases, this may give inaccurate names for clients in logs.
+        return getPackageNameFromUid(packageUid);
+    } else {
+        return clientPackageNameMaybe;
+    }
+}
+
+template<class CALLBACK, class CLIENT>
+Status CameraService::connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
+        int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
+        const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
+        apiLevel effectiveApiLevel, bool shimUpdateOnly, int oomScoreOffset, int targetSdkVersion,
+        int rotationOverride, bool forceSlowJpegMode,
+        const std::string& originalCameraId, bool isNonSystemNdk, /*out*/sp<CLIENT>& device) {
+    binder::Status ret = binder::Status::ok();
+
+    int packageUid = (clientUid == USE_CALLING_UID) ?
+            getCallingUid() : clientUid;
+    int packagePid = (clientPid == USE_CALLING_PID) ?
+            getCallingPid() : clientPid;
 
     nsecs_t openTimeNs = systemTime();
 
@@ -2465,7 +2482,7 @@
 
         // Enforce client permissions and do basic validity checks
         if (!(ret = validateConnectLocked(cameraId, clientPackageName,
-                /*inout*/clientUid, /*inout*/clientPid, /*out*/originalClientPid)).isOk()) {
+                /*inout*/clientUid, /*inout*/clientPid)).isOk()) {
             return ret;
         }
 
@@ -2482,7 +2499,7 @@
 
         sp<BasicClient> clientTmp = nullptr;
         std::shared_ptr<resource_policy::ClientDescriptor<std::string, sp<BasicClient>>> partial;
-        if ((err = handleEvictionsLocked(cameraId, originalClientPid, effectiveApiLevel,
+        if ((err = handleEvictionsLocked(cameraId, clientPid, effectiveApiLevel,
                 IInterface::asBinder(cameraCb), clientPackageName, oomScoreOffset,
                 systemNativeClient, /*out*/&clientTmp, /*out*/&partial)) != NO_ERROR) {
             switch (err) {
@@ -2528,9 +2545,11 @@
         bool overrideForPerfClass = SessionConfigurationUtils::targetPerfClassPrimaryCamera(
                 mPerfClassPrimaryCameraIds, cameraId, targetSdkVersion);
 
+        // Only use passed in clientPid to check permission. Use calling PID as the client PID
+        // that's connected to camera service directly.
         if(!(ret = makeClient(this, cameraCb, clientPackageName, systemNativeClient,
                 clientFeatureId, cameraId, api1CameraId, facing,
-                orientation, clientPid, clientUid, getpid(),
+                orientation, getCallingPid(), clientUid, getpid(),
                 deviceVersionAndTransport, effectiveApiLevel, overrideForPerfClass,
                 rotationOverride, forceSlowJpegMode, originalCameraId,
                 /*out*/&tmp)).isOk()) {
@@ -2826,8 +2845,8 @@
 }
 
 Status CameraService::turnOnTorchWithStrengthLevel(const std::string& unresolvedCameraId,
-        int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
-        int32_t devicePolicy) {
+        int32_t torchStrength, const sp<IBinder>& clientBinder,
+        const AttributionSourceState& clientAttribution, int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2838,11 +2857,11 @@
     }
 
     int uid = getCallingUid();
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -2964,7 +2983,8 @@
 }
 
 Status CameraService::setTorchMode(const std::string& unresolvedCameraId, bool enabled,
-        const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy) {
+        const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
+        int32_t devicePolicy) {
     Mutex::Autolock lock(mServiceLock);
 
     ATRACE_CALL();
@@ -2975,11 +2995,11 @@
     }
 
     int uid = getCallingUid();
-    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId, deviceId,
-            devicePolicy);
+    std::optional<std::string> cameraIdOptional = resolveCameraId(unresolvedCameraId,
+            clientAttribution.deviceId, devicePolicy);
     if (!cameraIdOptional.has_value()) {
         std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                unresolvedCameraId.c_str(), deviceId);
+                unresolvedCameraId.c_str(), clientAttribution.deviceId);
         ALOGE("%s: %s", __FUNCTION__, msg.c_str());
         return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
     }
@@ -3310,7 +3330,7 @@
 
 Status CameraService::isConcurrentSessionConfigurationSupported(
         const std::vector<CameraIdAndSessionConfiguration>& cameraIdsAndSessionConfigurations,
-        int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+        int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/bool* isSupported) {
     if (!isSupported) {
         ALOGE("%s: isSupported is NULL", __FUNCTION__);
@@ -3325,10 +3345,11 @@
 
     for (auto cameraIdAndSessionConfiguration : cameraIdsAndSessionConfigurations) {
         std::optional<std::string> cameraIdOptional =
-                resolveCameraId(cameraIdAndSessionConfiguration.mCameraId, deviceId, devicePolicy);
+                resolveCameraId(cameraIdAndSessionConfiguration.mCameraId,
+                        clientAttribution.deviceId, devicePolicy);
         if (!cameraIdOptional.has_value()) {
             std::string msg = fmt::sprintf("Camera %s: Invalid camera id for device id %d",
-                    cameraIdAndSessionConfiguration.mCameraId.c_str(), deviceId);
+                    cameraIdAndSessionConfiguration.mCameraId.c_str(), clientAttribution.deviceId);
             ALOGE("%s: %s", __FUNCTION__, msg.c_str());
             return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.c_str());
         }
@@ -3341,7 +3362,7 @@
     bool hasCameraPermission = ((callingPid == getpid()) ||
             hasPermissionsForCamera(callingPid, callingUid,
                     devicePolicy == IVirtualDeviceManagerNative::DEVICE_POLICY_DEFAULT
-                        ? kDefaultDeviceId : deviceId));
+                        ? kDefaultDeviceId : clientAttribution.deviceId));
     if (!hasCameraPermission) {
         return STATUS_ERROR(ERROR_PERMISSION_DENIED,
                 "android.permission.CAMERA needed to call"
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index 9998fb8..d5c57cb 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -154,13 +154,16 @@
     // resolveCameraId(logicalCameraId, deviceId, devicePolicy) to arrive at the correct
     // cameraId to perform the operation on (in case of contexts
     // associated with virtual devices).
-    virtual binder::Status     getNumberOfCameras(int32_t type, int32_t deviceId,
+    virtual binder::Status     getNumberOfCameras(int32_t type,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, int32_t* numCameras);
 
     virtual binder::Status     getCameraInfo(int cameraId, int rotationOverride,
-            int32_t deviceId, int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy, hardware::CameraInfo* cameraInfo) override;
     virtual binder::Status     getCameraCharacteristics(const std::string& cameraId,
-            int targetSdkVersion, int rotationOverride, int32_t deviceId,
+            int targetSdkVersion, int rotationOverride,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, CameraMetadata* cameraInfo) override;
     virtual binder::Status     getCameraVendorTagDescriptor(
             /*out*/
@@ -170,17 +173,15 @@
             hardware::camera2::params::VendorTagDescriptorCache* cache);
 
     virtual binder::Status     connect(const sp<hardware::ICameraClient>& cameraClient,
-            int32_t cameraId, const std::string& clientPackageName,
-            int32_t clientUid, int clientPid, int targetSdkVersion,
-            int rotationOverride, bool forceSlowJpegMode, int32_t deviceId,
+            int32_t cameraId, int targetSdkVersion, int rotationOverride, bool forceSlowJpegMode,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, /*out*/ sp<hardware::ICamera>* device) override;
 
     virtual binder::Status     connectDevice(
             const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
-            const std::string& cameraId,
-            const std::string& clientPackageName, const std::optional<std::string>& clientFeatureId,
-            int32_t clientUid, int scoreOffset, int targetSdkVersion, int rotationOverride,
-            int32_t deviceId, int32_t devicePolicy,
+            const std::string& cameraId, int scoreOffset, int targetSdkVersion,
+            int rotationOverride, const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy,
             /*out*/
             sp<hardware::camera2::ICameraDeviceUser>* device);
 
@@ -196,7 +197,7 @@
 
     virtual binder::Status isConcurrentSessionConfigurationSupported(
         const std::vector<hardware::camera2::utils::CameraIdAndSessionConfiguration>& sessions,
-        int targetSdkVersion, int32_t deviceId, int32_t devicePolicy,
+        int targetSdkVersion, const AttributionSourceState& clientAttribution, int32_t devicePolicy,
         /*out*/bool* supported);
 
     virtual binder::Status    getLegacyParameters(
@@ -205,13 +206,16 @@
             std::string* parameters);
 
     virtual binder::Status    setTorchMode(const std::string& cameraId, bool enabled,
-            const sp<IBinder>& clientBinder, int32_t deviceId, int32_t devicePolicy);
-
-    virtual binder::Status    turnOnTorchWithStrengthLevel(const std::string& cameraId,
-            int32_t torchStrength, const sp<IBinder>& clientBinder, int32_t deviceId,
+            const sp<IBinder>& clientBinder, const AttributionSourceState& clientAttribution,
             int32_t devicePolicy);
 
-    virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId, int32_t deviceId,
+    virtual binder::Status    turnOnTorchWithStrengthLevel(const std::string& cameraId,
+            int32_t torchStrength, const sp<IBinder>& clientBinder,
+            const AttributionSourceState& clientAttribution,
+            int32_t devicePolicy);
+
+    virtual binder::Status    getTorchStrengthLevel(const std::string& cameraId,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, int32_t* torchStrength);
 
     virtual binder::Status    notifySystemEvent(int32_t eventId,
@@ -247,19 +251,20 @@
             const hardware::camera2::impl::CameraMetadataNative& sessionParams);
 
     virtual binder::Status createDefaultRequest(const std::string& cameraId, int templateId,
-            int32_t deviceId, int32_t devicePolicy,
+            const AttributionSourceState& clientAttribution, int32_t devicePolicy,
             /*out*/
             hardware::camera2::impl::CameraMetadataNative* request);
 
     virtual binder::Status isSessionConfigurationWithParametersSupported(
             const std::string& cameraId, int targetSdkVersion,
             const SessionConfiguration& sessionConfiguration,
-            int32_t deviceId, int32_t devicePolicy,
+            const AttributionSourceState& clientAttribution, int32_t devicePolicy,
             /*out*/ bool* supported);
 
     virtual binder::Status getSessionCharacteristics(
             const std::string& cameraId, int targetSdkVersion, int rotationOverride,
-            const SessionConfiguration& sessionConfiguration, int32_t deviceId,
+            const SessionConfiguration& sessionConfiguration,
+            const AttributionSourceState& clientAttribution,
             int32_t devicePolicy, /*out*/ CameraMetadata* outMetadata);
 
     // Extra permissions checks
@@ -942,17 +947,17 @@
     void removeStates(const std::string& id);
 
     // Check if we can connect, before we acquire the service lock.
-    // The returned originalClientPid is the PID of the original process that wants to connect to
-    // camera.
-    // The returned clientPid is the PID of the client that directly connects to camera.
-    // originalClientPid and clientPid are usually the same except when the application uses
-    // mediaserver to connect to camera (using MediaRecorder to connect to camera). In that case,
-    // clientPid is the PID of mediaserver and originalClientPid is the PID of the application.
+    // If clientPid/clientUid are USE_CALLING_PID/USE_CALLING_UID, they will be overwritten with
+    // the calling pid/uid.
     binder::Status validateConnectLocked(const std::string& cameraId, const std::string& clientName,
-            /*inout*/int& clientUid, /*inout*/int& clientPid, /*out*/int& originalClientPid) const;
+            /*inout*/int& clientUid, /*inout*/int& clientPid) const;
     binder::Status validateClientPermissionsLocked(const std::string& cameraId,
-            const std::string& clientName, /*inout*/int& clientUid, /*inout*/int& clientPid,
-            /*out*/int& originalClientPid) const;
+            const std::string& clientName, /*inout*/int& clientUid, /*inout*/int& clientPid) const;
+
+    // If clientPackageNameMaybe is empty, attempts to resolve the package name from the UID.
+    std::string resolvePackageName(int clientUid, const std::string& clientPackageNameMaybe) const;
+    void logConnectionAttempt(int clientPid, const std::string& clientPackageName,
+        const std::string& cameraId, apiLevel effectiveApiLevel) const;
 
     bool isCameraPrivacyEnabled(const String16& packageName, const std::string& cameraId,
            int clientPid, int clientUid);
@@ -997,16 +1002,16 @@
     // as for legacy apps we will toggle the app op for all packages in the UID.
     // The caveat is that the operation may be attributed to the wrong package and
     // stats based on app ops may be slightly off.
-    std::string getPackageNameFromUid(int clientUid);
+    std::string getPackageNameFromUid(int clientUid) const;
 
     // Single implementation shared between the various connect calls
     template<class CALLBACK, class CLIENT>
     binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const std::string& cameraId,
-            int api1CameraId, const std::string& clientPackageNameMaybe, bool systemNativeClient,
+            int api1CameraId, const std::string& clientPackageName, bool systemNativeClient,
             const std::optional<std::string>& clientFeatureId, int clientUid, int clientPid,
             apiLevel effectiveApiLevel, bool shimUpdateOnly, int scoreOffset, int targetSdkVersion,
             int rotationOverride, bool forceSlowJpegMode,
-            const std::string& originalCameraId,
+            const std::string& originalCameraId, bool isNonSystemNdk,
             /*out*/sp<CLIENT>& device);
 
     // Lock guarding camera service state
diff --git a/services/camera/libcameraservice/aidl/AidlCameraService.cpp b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
index 2886942..7f674bd 100644
--- a/services/camera/libcameraservice/aidl/AidlCameraService.cpp
+++ b/services/camera/libcameraservice/aidl/AidlCameraService.cpp
@@ -28,6 +28,7 @@
 #include <binder/Status.h>
 #include <camera/CameraUtils.h>
 #include <hidl/HidlTransportSupport.h>
+#include <utils/AttributionAndPermissionUtils.h>
 #include <utils/Utils.h>
 
 namespace android::frameworks::cameraservice::service::implementation {
@@ -89,10 +90,15 @@
     if (_aidl_return == nullptr) { return fromSStatus(SStatus::ILLEGAL_ARGUMENT); }
 
     ::android::CameraMetadata cameraMetadata;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
     UStatus ret = mCameraService->getCameraCharacteristics(in_cameraId,
                                                            mVndkVersion,
                                                            ROTATION_OVERRIDE_NONE,
-                                                           kDefaultDeviceId,
+                                                           clientAttribution,
                                                            /* devicePolicy= */ 0,
                                                            &cameraMetadata);
     if (!ret.isOk()) {
@@ -143,16 +149,20 @@
         return fromSStatus(SStatus::UNKNOWN_ERROR);
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
     binder::Status serviceRet = mCameraService->connectDevice(
             callbacks,
             in_cameraId,
-            std::string(),
-            /* clientFeatureId= */{},
-            hardware::ICameraService::USE_CALLING_UID,
             /* scoreOffset= */ 0,
             /* targetSdkVersion= */ __ANDROID_API_FUTURE__,
             ROTATION_OVERRIDE_NONE,
-            kDefaultDeviceId,
+            clientAttribution,
             /* devicePolicy= */ 0,
             &unstableDevice);
     if (!serviceRet.isOk()) {
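
The hunks above replace the loose clientPackageName/clientUid/clientPid/deviceId arguments on ICameraService calls with a single AttributionSourceState. A minimal sketch of populating one by hand, using only the fields exercised in these hunks (kDefaultDeviceId is the shim's existing constant):

    AttributionSourceState clientAttribution;
    clientAttribution.pid = hardware::ICameraService::USE_CALLING_PID;  // service substitutes the binder caller
    clientAttribution.uid = hardware::ICameraService::USE_CALLING_UID;
    clientAttribution.deviceId = kDefaultDeviceId;
    clientAttribution.packageName = "";              // empty: the service resolves it from the uid
    clientAttribution.attributionTag = std::nullopt;
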
diff --git a/services/camera/libcameraservice/api1/Camera2Client.cpp b/services/camera/libcameraservice/api1/Camera2Client.cpp
index 0d89b3e..861414f 100644
--- a/services/camera/libcameraservice/api1/Camera2Client.cpp
+++ b/services/camera/libcameraservice/api1/Camera2Client.cpp
@@ -24,11 +24,12 @@
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <camera/CameraUtils.h>
 #include <camera/StringUtils.h>
+#include <com_android_internal_camera_flags.h>
 #include <cutils/properties.h>
 #include <gui/Surface.h>
-#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 
 #include "api1/Camera2Client.h"
 
@@ -50,6 +51,8 @@
 namespace android {
 using namespace camera2;
 
+namespace flags = com::android::internal::camera::flags;
+
 // Interface used by CameraService
 
 Camera2Client::Camera2Client(const sp<CameraService>& cameraService,
@@ -501,7 +504,16 @@
     bool hasDeviceError = mDevice->hasDeviceError();
     mDevice->disconnect();
 
-    CameraService::Client::disconnect();
+    if (flags::api1_release_binderlock_before_cameraservice_disconnect()) {
+        // CameraService::Client::disconnect calls CameraService which attempts to lock
+        // CameraService's mServiceLock. This might lead to a deadlock if CameraService is
+        // currently waiting to lock mSerializationLock on another thread.
+        mBinderSerializationLock.unlock();
+        CameraService::Client::disconnect();
+        mBinderSerializationLock.lock();
+    } else {
+        CameraService::Client::disconnect();
+    }
 
     int32_t closeLatencyMs = ns2ms(systemTime() - startTime);
     mCameraServiceProxyWrapper->logClose(mCameraIdStr, closeLatencyMs, hasDeviceError);
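
The flag-guarded branch above avoids a lock-order inversion: the client drops its own serialization lock before calling back into CameraService, which takes mServiceLock. A generic sketch of that pattern under illustrative names (gLocalLock and disconnectOtherComponent() are placeholders, not the real members):

    #include <mutex>

    std::mutex gLocalLock;                    // stands in for the client's serialization lock
    void disconnectOtherComponent() {}        // stands in for the call back into the service

    void disconnectSafely() {
        std::unique_lock<std::mutex> lock(gLocalLock);
        // ... tear down state owned by this object while holding gLocalLock ...
        lock.unlock();                        // release before calling out
        disconnectOtherComponent();           // may block on the other component's lock
        lock.lock();                          // reacquire to finish local cleanup
    }
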
diff --git a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
index 17db20b..2fbf49e 100644
--- a/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/CallbackProcessor.cpp
@@ -18,9 +18,10 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -113,6 +114,12 @@
     if (!mCallbackToApp && mCallbackConsumer == 0) {
         // Create CPU buffer queue endpoint, since app hasn't given us one
         // Make it async to avoid disconnect deadlocks
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mCallbackConsumer = new CpuConsumer(kCallbackHeapCount);
+        mCallbackConsumer->setFrameAvailableListener(this);
+        mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
+        mCallbackWindow = mCallbackConsumer->getSurface();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
@@ -120,6 +127,7 @@
         mCallbackConsumer->setFrameAvailableListener(this);
         mCallbackConsumer->setName(String8("Camera2-CallbackConsumer"));
         mCallbackWindow = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
     if (mCallbackStreamId != NO_STREAM) {
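
The same flag-guarded construction repeats in the JPEG, ZSL, depth, HEIC and JPEG/R processors below: with WB_CONSUMER_BASE_OWNS_BQ the consumer owns its BufferQueue and hands out the Surface, so the explicit BufferQueue::createBufferQueue() wiring disappears. A condensed sketch of the two paths, assuming only the libgui types already used in these hunks:

    sp<CpuConsumer> consumer;
    sp<Surface> window;
    #if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
        // New path: the consumer allocates its own BufferQueue internally.
        consumer = new CpuConsumer(/*maxLockedBuffers*/ 1);
        window = consumer->getSurface();
    #else
        // Legacy path: create the BufferQueue and wire both ends by hand.
        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> bqConsumer;
        BufferQueue::createBufferQueue(&producer, &bqConsumer);
        consumer = new CpuConsumer(bqConsumer, /*maxLockedBuffers*/ 1);
        window = new Surface(producer);
    #endif
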
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index eb00bf8..3a0489c 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -25,9 +25,10 @@
 
 #include <binder/MemoryBase.h>
 #include <binder/MemoryHeapBase.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -93,6 +94,12 @@
 
     if (mCaptureConsumer == 0) {
         // Create CPU buffer queue endpoint
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mCaptureConsumer = new CpuConsumer(1);
+        mCaptureConsumer->setFrameAvailableListener(this);
+        mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
+        mCaptureWindow = mCaptureConsumer->getSurface();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
@@ -100,6 +107,7 @@
         mCaptureConsumer->setFrameAvailableListener(this);
         mCaptureConsumer->setName(String8("Camera2-JpegConsumer"));
         mCaptureWindow = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     }
 
     // Since ashmem heaps are rounded up to page size, don't reallocate if
diff --git a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
index d6c2415..d4953c1 100644
--- a/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/ZslProcessor.cpp
@@ -27,10 +27,11 @@
 
 #include <inttypes.h>
 
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <gui/Surface.h>
-#include <camera/StringUtils.h>
 
 #include "common/CameraDeviceBase.h"
 #include "api1/Camera2Client.h"
@@ -250,7 +251,11 @@
     if (mZslStreamId == NO_STREAM) {
         // Create stream for HAL production
         // TODO: Sort out better way to select resolution for ZSL
-
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mProducer = new RingBufferConsumer(GRALLOC_USAGE_HW_CAMERA_ZSL, mBufferQueueDepth);
+        mProducer->setName("Camera2-ZslRingBufferConsumer");
+        sp<Surface> outSurface = mProducer->getSurface();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
@@ -258,6 +263,7 @@
             mBufferQueueDepth);
         mProducer->setName("Camera2-ZslRingBufferConsumer");
         sp<Surface> outSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         res = device->createStream(outSurface, params.fastInfo.usedZslSize.width,
             params.fastInfo.usedZslSize.height, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 1b7fc6e..fa569ce 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -23,7 +23,7 @@
 #include <android/hardware/camera2/ICameraDeviceCallbacks.h>
 #include <camera/CameraMetadata.h>
 #include <camera/camera2/OutputConfiguration.h>
-#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
 #include "common/CameraDeviceBase.h"
 #include "device3/Camera3StreamInterface.h"
 
@@ -96,9 +96,12 @@
             const CameraMetadata& settings) override;
 
 protected:
-    struct ProducerListener : public BnProducerListener {
-        // ProducerListener impementation
+    struct StreamSurfaceListener : public SurfaceListener {
+        // StreamSurfaceListener implementation
         void onBufferReleased() override { /*No impl. for now*/ };
+        bool needsReleaseNotify() override { return true; };
+        void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /*buffers*/) override {};
+        void onBufferDetached(int /*slot*/) override {};
     };
 
     status_t registerCompositeStreamListener(int32_t streamId);
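
The BnProducerListener → SurfaceListener migration above repeats for Camera3StreamSplitter further down; every listener now has to answer needsReleaseNotify() and supply onBuffersDiscarded()/onBufferDetached() bodies. A minimal stand-alone sketch (the class name is illustrative):

    // Mirrors the no-op overrides added in this change; NoOpSurfaceListener is illustrative.
    struct NoOpSurfaceListener : public SurfaceListener {
        void onBufferReleased() override {}
        bool needsReleaseNotify() override { return true; }
        void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /*buffers*/) override {}
        void onBufferDetached(int /*slot*/) override {}
    };
    // Used the same way as before, e.g. surface->connect(NATIVE_WINDOW_API_CAMERA, listener).
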
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
index 8b41d00..244a1e5 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.cpp
@@ -22,13 +22,15 @@
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
 #include <camera/StringUtils.h>
 
-#include "api1/client2/JpegProcessor.h"
-#include "common/CameraProviderManager.h"
-#include "utils/SessionConfigurationUtils.h"
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
 
+#include "api1/client2/JpegProcessor.h"
+#include "common/CameraProviderManager.h"
+#include "utils/SessionConfigurationUtils.h"
+
 #include "DepthCompositeStream.h"
 
 namespace android {
@@ -48,7 +50,7 @@
         mBlobHeight(0),
         mDepthBufferAcquired(false),
         mBlobBufferAcquired(false),
-        mProducerListener(new ProducerListener()),
+        mStreamSurfaceListener(new StreamSurfaceListener()),
         mMaxJpegBufferSize(-1),
         mUHRMaxJpegBufferSize(-1),
         mIsLogicalCamera(false) {
@@ -614,6 +616,12 @@
         return NO_INIT;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    mBlobConsumer->setFrameAvailableListener(this);
+    mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
+    mBlobSurface = mBlobConsumer->getSurface();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
@@ -621,6 +629,7 @@
     mBlobConsumer->setFrameAvailableListener(this);
     mBlobConsumer->setName(String8("Camera3-JpegCompositeStream"));
     mBlobSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     ret = device->createStream(mBlobSurface, width, height, format, kJpegDataSpace, rotation,
             id, physicalCameraId, sensorPixelModesUsed, surfaceIds,
@@ -639,11 +648,18 @@
         return ret;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mDepthConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    mDepthConsumer->setFrameAvailableListener(this);
+    mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
+    mDepthSurface = mDepthConsumer->getSurface();
+#else
     BufferQueue::createBufferQueue(&producer, &consumer);
     mDepthConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
     mDepthConsumer->setFrameAvailableListener(this);
     mDepthConsumer->setName(String8("Camera3-DepthCompositeStream"));
     mDepthSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     std::vector<int> depthSurfaceId;
     ret = device->createStream(mDepthSurface, depthWidth, depthHeight, kDepthMapPixelFormat,
             kDepthMapDataSpace, rotation, &mDepthStreamId, physicalCameraId, sensorPixelModesUsed,
@@ -690,7 +706,7 @@
         return NO_INIT;
     }
 
-    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mBlobStreamId);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index 3f8f6a2..75deef7 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -130,12 +130,12 @@
     static const auto kDepthMapDataSpace = HAL_DATASPACE_DEPTH;
     static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
 
-    int                  mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
-    size_t               mBlobWidth, mBlobHeight;
-    sp<CpuConsumer>      mBlobConsumer, mDepthConsumer;
-    bool                 mDepthBufferAcquired, mBlobBufferAcquired;
-    sp<Surface>          mDepthSurface, mBlobSurface, mOutputSurface;
-    sp<ProducerListener> mProducerListener;
+    int                         mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
+    size_t                      mBlobWidth, mBlobHeight;
+    sp<CpuConsumer>             mBlobConsumer, mDepthConsumer;
+    bool                        mDepthBufferAcquired, mBlobBufferAcquired;
+    sp<Surface>                 mDepthSurface, mBlobSurface, mOutputSurface;
+    sp<StreamSurfaceListener>   mStreamSurfaceListener;
 
     ssize_t              mMaxJpegBufferSize;
     ssize_t              mUHRMaxJpegBufferSize;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
index ea4f6fd..206c879 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -25,11 +25,12 @@
 
 #include <aidl/android/hardware/camera/device/CameraBlob.h>
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
-#include <libyuv.h>
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
+#include <libyuv.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <camera/StringUtils.h>
 
 #include <mediadrm/ICrypto.h>
 #include <media/MediaCodecBuffer.h>
@@ -68,7 +69,7 @@
         mMainImageStreamId(-1),
         mMainImageSurfaceId(-1),
         mYuvBufferAcquired(false),
-        mProducerListener(new ProducerListener()),
+        mStreamSurfaceListener(new StreamSurfaceListener()),
         mDequeuedOutputBufferCnt(0),
         mCodecOutputCounter(0),
         mQuality(-1),
@@ -142,6 +143,13 @@
         return NO_INIT;
     }
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mAppSegmentConsumer = new CpuConsumer(kMaxAcquiredAppSegment);
+    mAppSegmentConsumer->setFrameAvailableListener(this);
+    mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+    mAppSegmentSurface = mAppSegmentConsumer->getSurface();
+    sp<IGraphicBufferProducer> producer = mAppSegmentSurface->getIGraphicBufferProducer();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
@@ -149,6 +157,7 @@
     mAppSegmentConsumer->setFrameAvailableListener(this);
     mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
     mAppSegmentSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     mStaticInfo = device->info();
 
@@ -178,8 +187,13 @@
             return res;
         }
     } else {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mMainImageConsumer = new CpuConsumer(1);
+        producer = mMainImageConsumer->getSurface()->getIGraphicBufferProducer();
+#else
         BufferQueue::createBufferQueue(&producer, &consumer);
         mMainImageConsumer = new CpuConsumer(consumer, 1);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         mMainImageConsumer->setFrameAvailableListener(this);
         mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
     }
@@ -513,7 +527,7 @@
         return NO_INIT;
     }
 
-    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mMainImageStreamId);
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
index 9cfd78b..ba10e05 100644
--- a/services/camera/libcameraservice/api2/HeicCompositeStream.h
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -19,7 +19,6 @@
 
 #include <queue>
 
-#include <gui/IProducerListener.h>
 #include <gui/CpuConsumer.h>
 
 #include <media/hardware/VideoAPI.h>
@@ -234,10 +233,10 @@
     bool              mYuvBufferAcquired; // Only applicable to HEVC codec
     std::queue<int64_t> mMainImageFrameNumbers;
 
-    static const int32_t kMaxOutputSurfaceProducerCount = 1;
-    sp<Surface>       mOutputSurface;
-    sp<ProducerListener> mProducerListener;
-    int32_t           mDequeuedOutputBufferCnt;
+    static const int32_t        kMaxOutputSurfaceProducerCount = 1;
+    sp<Surface>                 mOutputSurface;
+    sp<StreamSurfaceListener>   mStreamSurfaceListener;
+    int32_t                     mDequeuedOutputBufferCnt;
 
     // Map from frame number to JPEG setting of orientation+quality
     struct HeicSettings {
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
index dadefcc..c5bd7a9 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.cpp
@@ -14,9 +14,6 @@
  * limitations under the License.
  */
 
-#include "hardware/gralloc.h"
-#include "system/graphics-base-v1.0.h"
-#include "system/graphics-base-v1.1.h"
 #define LOG_TAG "Camera3-JpegRCompositeStream"
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
@@ -25,11 +22,16 @@
 #include <aidl/android/hardware/camera/device/CameraBlobId.h>
 
 #include "common/CameraProviderManager.h"
+#include "utils/SessionConfigurationUtils.h"
+
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/Surface.h>
+#include <hardware/gralloc.h>
+#include <system/graphics-base-v1.0.h>
+#include <system/graphics-base-v1.1.h>
 #include <ultrahdr/jpegr.h>
 #include <utils/ExifUtils.h>
 #include <utils/Log.h>
-#include "utils/SessionConfigurationUtils.h"
 #include <utils/Trace.h>
 
 #include "JpegRCompositeStream.h"
@@ -54,7 +56,7 @@
         mOutputColorSpace(ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_UNSPECIFIED),
         mOutputStreamUseCase(0),
         mFirstRequestLatency(-1),
-        mProducerListener(new ProducerListener()),
+        mStreamSurfaceListener(new StreamSurfaceListener()),
         mMaxJpegBufferSize(-1),
         mUHRMaxJpegBufferSize(-1),
         mStaticInfo(device->info()) {
@@ -573,6 +575,12 @@
             mStaticInfo, mP010DynamicRange,
             ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP_STANDARD);
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mP010Consumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+    mP010Consumer->setFrameAvailableListener(this);
+    mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
+    mP010Surface = mP010Consumer->getSurface();
+#else
     sp<IGraphicBufferProducer> producer;
     sp<IGraphicBufferConsumer> consumer;
     BufferQueue::createBufferQueue(&producer, &consumer);
@@ -580,6 +588,7 @@
     mP010Consumer->setFrameAvailableListener(this);
     mP010Consumer->setName(String8("Camera3-P010CompositeStream"));
     mP010Surface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     auto ret = device->createStream(mP010Surface, width, height, kP010PixelFormat,
             static_cast<android_dataspace>(mP010DataSpace), rotation,
@@ -597,11 +606,18 @@
     }
 
     if (mSupportInternalJpeg) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mBlobConsumer = new CpuConsumer(/*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
+        mBlobConsumer->setFrameAvailableListener(this);
+        mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
+        mBlobSurface = mBlobConsumer->getSurface();
+#else
         BufferQueue::createBufferQueue(&producer, &consumer);
         mBlobConsumer = new CpuConsumer(consumer, /*maxLockedBuffers*/ 1, /*controlledByApp*/ true);
         mBlobConsumer->setFrameAvailableListener(this);
         mBlobConsumer->setName(String8("Camera3-JpegRCompositeStream"));
         mBlobSurface = new Surface(producer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         std::vector<int> blobSurfaceId;
         ret = device->createStream(mBlobSurface, width, height, format,
                 kJpegDataSpace, rotation, &mBlobStreamId, physicalCameraId, sensorPixelModesUsed,
@@ -653,7 +669,7 @@
         return NO_INIT;
     }
 
-    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+    auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mStreamSurfaceListener);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mP010StreamId);
diff --git a/services/camera/libcameraservice/api2/JpegRCompositeStream.h b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
index 6669739..d3ab19c 100644
--- a/services/camera/libcameraservice/api2/JpegRCompositeStream.h
+++ b/services/camera/libcameraservice/api2/JpegRCompositeStream.h
@@ -128,7 +128,8 @@
     int32_t              mOutputColorSpace;
     int64_t              mOutputStreamUseCase;
     nsecs_t              mFirstRequestLatency;
-    sp<ProducerListener> mProducerListener;
+
+    sp<StreamSurfaceListener> mStreamSurfaceListener;
 
     ssize_t              mMaxJpegBufferSize;
     ssize_t              mUHRMaxJpegBufferSize;
diff --git a/services/camera/libcameraservice/device3/Camera3BufferManager.h b/services/camera/libcameraservice/device3/Camera3BufferManager.h
index 64aaa230..27fcf96 100644
--- a/services/camera/libcameraservice/device3/Camera3BufferManager.h
+++ b/services/camera/libcameraservice/device3/Camera3BufferManager.h
@@ -68,7 +68,7 @@
      * by the consumer end point, the BufferQueueProducer callback onBufferReleased will call
      * returnBufferForStream() to return the free buffer to this buffer manager. If the stream
      * uses buffer manager to manage the stream buffers, it should disable the BufferQueue
-     * allocation via IGraphicBufferProducer::allowAllocation(false).
+     * allocation via Surface::allowAllocation(false).
      *
      * Registering an already registered stream has no effect.
      *
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 9f6829c..11891e9 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -612,10 +612,35 @@
 
     {
         lines = "    Last request sent:\n";
+        LatestRequestInfo lastRequestInfo = getLatestRequestInfoLocked();
+        // Print out output and input stream ids
+        if (flags::dumpsys_request_stream_ids()) {
+            if (lastRequestInfo.outputStreamIds.size() != 0) {
+                lines += "      Output Stream Ids:\n";
+                for (const auto &streamId: lastRequestInfo.outputStreamIds) {
+                    lines +=  "         " + std::to_string(streamId) + "\n";
+                }
+            }
+            if (lastRequestInfo.inputStreamId != -1) {
+                lines += "       Input Stream Id: " + std::to_string(lastRequestInfo.inputStreamId)
+                        + "\n";
+            }
+        }
+        // Keeping this write() outside the flagged if makes it easier to
+        // remove the flag later.
+        write(fd, lines.c_str(), lines.size());
+        lines = "    Logical request settings:\n";
+        CameraMetadata lastRequestSettings = lastRequestInfo.requestSettings;
         write(fd, lines.c_str(), lines.size());
 
-        CameraMetadata lastRequest = getLatestRequestLocked();
-        lastRequest.dump(fd, /*verbosity*/2, /*indentation*/6);
+        lastRequestSettings.dump(fd, /*verbosity=all info*/2, /*indentation*/6);
+        if (flags::dumpsys_request_stream_ids()) {
+            for (const auto& pair: lastRequestInfo.physicalRequestSettings) {
+                lines = "    Physical request settings for camera id " + pair.first + ":\n";
+                write(fd, lines.c_str(), lines.size());
+                pair.second.dump(fd, /*verbosity=all info*/2, /*indentation*/8);
+            }
+        }
     }
 
     if (dumpTemplates) {
@@ -2911,13 +2936,13 @@
     camera3::flushInflightRequests(states);
 }
 
-CameraMetadata Camera3Device::getLatestRequestLocked() {
+Camera3Device::LatestRequestInfo Camera3Device::getLatestRequestInfoLocked() {
     ALOGV("%s", __FUNCTION__);
 
-    CameraMetadata retVal;
+    LatestRequestInfo retVal;
 
     if (mRequestThread != NULL) {
-        retVal = mRequestThread->getLatestRequest();
+        retVal = mRequestThread->getLatestRequestInfo();
     }
 
     return retVal;
@@ -3487,30 +3512,40 @@
     if (halRequest.settings != nullptr) { // Don't update if they were unchanged
         Mutex::Autolock al(mLatestRequestMutex);
 
-        camera_metadata_t* cloned = clone_camera_metadata(halRequest.settings);
-        mLatestRequest.acquire(cloned);
+        // Fill in latest request and physical request
+        camera_metadata_t *cloned = clone_camera_metadata(halRequest.settings);
+        mLatestRequestInfo.requestSettings.acquire(cloned);
 
-        mLatestPhysicalRequest.clear();
+        mLatestRequestInfo.physicalRequestSettings.clear();
+        mLatestRequestInfo.outputStreamIds.clear();
         for (uint32_t i = 0; i < halRequest.num_physcam_settings; i++) {
             cloned = clone_camera_metadata(halRequest.physcam_settings[i]);
-            mLatestPhysicalRequest.emplace(halRequest.physcam_id[i],
-                    CameraMetadata(cloned));
+            mLatestRequestInfo.physicalRequestSettings.emplace(halRequest.physcam_id[i],
+                                           CameraMetadata(cloned));
         }
 
         if (parent != nullptr) {
             int32_t inputStreamId = -1;
             if (halRequest.input_buffer != nullptr) {
               inputStreamId = Camera3Stream::cast(halRequest.input_buffer->stream)->getId();
+              mLatestRequestInfo.inputStreamId = inputStreamId;
             }
 
+            for (size_t i = 0; i < halRequest.num_output_buffers; i++) {
+                int32_t outputStreamId =
+                        Camera3Stream::cast(halRequest.output_buffers[i].stream)->getId();
+                mLatestRequestInfo.outputStreamIds.emplace(outputStreamId);
+            }
+
             parent->monitorMetadata(TagMonitor::REQUEST,
                     halRequest.frame_number,
-                    0, mLatestRequest, mLatestPhysicalRequest, halRequest.output_buffers,
+                    0, mLatestRequestInfo.requestSettings,
+                    mLatestRequestInfo.physicalRequestSettings, halRequest.output_buffers,
                     halRequest.num_output_buffers, inputStreamId);
         }
     }
     if (parent != nullptr) {
-        parent->collectRequestStats(halRequest.frame_number, mLatestRequest);
+        parent->collectRequestStats(halRequest.frame_number, mLatestRequestInfo.requestSettings);
     }
 
     if (halRequest.settings != nullptr) {
@@ -4170,13 +4205,13 @@
     return OK;
 }
 
-CameraMetadata Camera3Device::RequestThread::getLatestRequest() const {
+Camera3Device::LatestRequestInfo Camera3Device::RequestThread::getLatestRequestInfo() const {
     ATRACE_CALL();
     Mutex::Autolock al(mLatestRequestMutex);
 
     ALOGV("RequestThread::%s", __FUNCTION__);
 
-    return mLatestRequest;
+    return mLatestRequestInfo;
 }
 
 bool Camera3Device::RequestThread::isStreamPending(
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index 62ae38c..6f87ca3 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -736,12 +736,19 @@
     virtual void applyMaxBatchSizeLocked(
             RequestList* requestList, const sp<camera3::Camera3OutputStreamInterface>& stream) = 0;
 
+    struct LatestRequestInfo {
+        CameraMetadata requestSettings;
+        std::unordered_map<std::string, CameraMetadata> physicalRequestSettings;
+        int32_t inputStreamId = -1;
+        std::set<int32_t> outputStreamIds;
+    };
+
     /**
      * Get the last request submitted to the hal by the request thread.
      *
      * Must be called with mLock held.
      */
-    virtual CameraMetadata getLatestRequestLocked();
+    virtual LatestRequestInfo getLatestRequestInfoLocked();
 
     virtual status_t injectionCameraInitialize(const std::string &injectCamId,
             sp<CameraProviderManager> manager) = 0;
@@ -992,7 +999,7 @@
          * Get the latest request that was sent to the HAL
          * with process_capture_request.
          */
-        CameraMetadata getLatestRequest() const;
+        LatestRequestInfo getLatestRequestInfo() const;
 
         /**
          * Returns true if the stream is a target of any queued or repeating
@@ -1192,8 +1199,7 @@
         // android.request.id for latest process_capture_request
         int32_t            mLatestRequestId;
         int32_t            mLatestFailedRequestId;
-        CameraMetadata     mLatestRequest;
-        std::unordered_map<std::string, CameraMetadata> mLatestPhysicalRequest;
+        LatestRequestInfo mLatestRequestInfo;
 
         typedef KeyedVector<uint32_t/*tag*/, RequestTrigger> TriggerMap;
         Mutex              mTriggerMutex;
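
Taken together, the Camera3Device hunks above cache one LatestRequestInfo per submitted request (settings, per-physical-camera settings, and the input/output stream ids) instead of two separate metadata members. A hedged sketch of reading it back, modeled on the dump() hunk earlier in this change (the string handling is illustrative):

    LatestRequestInfo info = getLatestRequestInfoLocked();   // requires mLock, as documented above
    std::string lines;
    for (int32_t streamId : info.outputStreamIds) {
        lines += "      Output Stream Id: " + std::to_string(streamId) + "\n";
    }
    if (info.inputStreamId != -1) {
        lines += "      Input Stream Id: " + std::to_string(info.inputStreamId) + "\n";
    }
    // info.requestSettings and info.physicalRequestSettings carry the metadata to dump.
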
diff --git a/services/camera/libcameraservice/device3/Camera3InputStream.cpp b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
index 283322e..f9b6037 100644
--- a/services/camera/libcameraservice/device3/Camera3InputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3InputStream.cpp
@@ -18,10 +18,12 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
 #include <utils/Log.h>
 #include <utils/Trace.h>
-#include <camera/StringUtils.h>
+
 #include "Camera3InputStream.h"
 
 namespace android {
@@ -239,9 +241,15 @@
     mLastTimestamp = 0;
 
     if (mConsumer.get() == 0) {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<BufferItemConsumer> bufferItemConsumer = new BufferItemConsumer(mUsage);
+        sp<IGraphicBufferProducer> producer =
+                bufferItemConsumer->getSurface()->getIGraphicBufferProducer();
+#else
         sp<IGraphicBufferProducer> producer;
         sp<IGraphicBufferConsumer> consumer;
         BufferQueue::createBufferQueue(&producer, &consumer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         int minUndequeuedBuffers = 0;
         res = producer->query(NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBuffers);
@@ -271,11 +279,19 @@
             camera_stream::max_buffers : minBufs;
         // TODO: somehow set the total buffer count when producer connects?
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        mConsumer = bufferItemConsumer;
+        mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
+        mConsumer->setMaxAcquiredBufferCount(mTotalBufferCount);
+
+        mProducer = mConsumer->getSurface()->getIGraphicBufferProducer();
+#else
         mConsumer = new BufferItemConsumer(consumer, mUsage,
                                            mTotalBufferCount);
         mConsumer->setName(String8::format("Camera3-InputStream-%d", mId));
 
         mProducer = producer;
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
         mConsumer->setBufferFreedListener(this);
     }
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
index 3cd4543..54d55a1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.cpp
@@ -555,8 +555,8 @@
     // Configure consumer-side ANativeWindow interface. The listener may be used
     // to notify buffer manager (if it is used) of the returned buffers.
     res = mConsumer->connect(NATIVE_WINDOW_API_CAMERA,
-            /*reportBufferRemoval*/true,
-            /*listener*/mBufferProducerListener);
+            /*listener*/mBufferProducerListener,
+            /*reportBufferRemoval*/true);
     if (res != OK) {
         ALOGE("%s: Unable to connect to native window for stream %d",
                 __FUNCTION__, mId);
diff --git a/services/camera/libcameraservice/device3/Camera3OutputStream.h b/services/camera/libcameraservice/device3/Camera3OutputStream.h
index 8a93ed8..f8b78c1 100644
--- a/services/camera/libcameraservice/device3/Camera3OutputStream.h
+++ b/services/camera/libcameraservice/device3/Camera3OutputStream.h
@@ -193,6 +193,7 @@
             virtual void onBufferReleased();
             virtual bool needsReleaseNotify() { return mNeedsReleaseNotify; }
             virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& buffers);
+            virtual void onBufferDetached(int /*slot*/) override {};
 
         private:
             wp<Camera3OutputStream> mParent;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 255b4f2..77c037a 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -20,12 +20,13 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
+#include <gui/BufferQueue.h>
 #include <gui/IGraphicBufferConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
-#include <gui/BufferQueue.h>
 #include <gui/Surface.h>
-#include <camera/StringUtils.h>
 
 #include <ui/GraphicBuffer.h>
 
@@ -81,18 +82,28 @@
         }
     }
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     // Create BufferQueue for input
     BufferQueue::createBufferQueue(&mProducer, &mConsumer);
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     // Allocate 1 extra buffer to handle the case where all buffers are detached
     // from input, and attached to the outputs. In this case, the input queue's
     // dequeueBuffer can still allocate 1 extra buffer before being blocked by
     // the output's attachBuffer().
     mMaxConsumerBuffers++;
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mBufferItemConsumer = new BufferItemConsumer(consumerUsage, mMaxConsumerBuffers);
+#else
     mBufferItemConsumer = new BufferItemConsumer(mConsumer, consumerUsage, mMaxConsumerBuffers);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     if (mBufferItemConsumer == nullptr) {
         return NO_MEMORY;
     }
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    mProducer = mBufferItemConsumer->getSurface()->getIGraphicBufferProducer();
+    mConsumer = mBufferItemConsumer->getIGraphicBufferConsumer();
+#endif  //  COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mConsumer->setConsumerName(toString8(mConsumerName));
 
     *consumer = new Surface(mProducer);
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
index 1feb4a0..43f12fb 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.h
@@ -22,7 +22,7 @@
 #include <camera/CameraMetadata.h>
 
 #include <gui/IConsumerListener.h>
-#include <gui/IProducerListener.h>
+#include <gui/Surface.h>
 #include <gui/BufferItemConsumer.h>
 
 #include <utils/Condition.h>
@@ -159,7 +159,7 @@
     // the IProducerListener::onBufferReleased callback is associated with. We
     // create one of these per output BufferQueue, and then pass the producer
     // into onBufferReleasedByOutput above.
-    class OutputListener : public BnProducerListener,
+    class OutputListener : public SurfaceListener,
                            public IBinder::DeathRecipient {
     public:
         OutputListener(wp<Camera3StreamSplitter> splitter,
@@ -168,6 +168,9 @@
 
         // From IProducerListener
         void onBufferReleased() override;
+        bool needsReleaseNotify() override { return true; };
+        void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>& /*buffers*/) override {};
+        void onBufferDetached(int /*slot*/) override {}
 
         // From IBinder::DeathRecipient
         void binderDied(const wp<IBinder>& who) override;
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
index 83caa00..a04406e 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.cpp
@@ -18,10 +18,16 @@
 #define ATRACE_TAG ATRACE_TAG_CAMERA
 //#define LOG_NDEBUG 0
 
+#include <com_android_internal_camera_flags.h>
+
 #include <utils/Log.h>
 
 #include "PreviewFrameSpacer.h"
 #include "Camera3OutputStream.h"
+#include "utils/SchedulingPolicyUtils.h"
+#include "utils/Utils.h"
+
+namespace flags = com::android::internal::camera::flags;
 
 namespace android {
 
@@ -129,6 +135,24 @@
     mLastCameraReadoutTime = bufferHolder.readoutTimestamp;
 }
 
+status_t PreviewFrameSpacer::run(const char* name, int32_t priority, size_t stack) {
+    auto ret = Thread::run(name, priority, stack);
+    if (flags::bump_preview_frame_space_priority()) {
+        // Boost priority of the preview frame spacer thread to SCHED_FIFO.
+        pid_t previewFrameSpacerTid = getTid();
+        auto res = SchedulingPolicyUtils::requestPriorityDirect(getpid(), previewFrameSpacerTid,
+                RunThreadWithRealtimePriority::kRequestThreadPriority);
+        if (res != OK) {
+            ALOGW("Can't set realtime priority for preview frame spacer thread: %s (%d)",
+                    strerror(-res), res);
+        } else {
+            ALOGV("Set real time priority for preview frame spacer thread (tid %d)",
+                    previewFrameSpacerTid);
+        }
+    }
+    return ret;
+}
+
 }; // namespace camera3
 
 }; // namespace android
diff --git a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
index f46de3d..ab85189 100644
--- a/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
+++ b/services/camera/libcameraservice/device3/PreviewFrameSpacer.h
@@ -58,6 +58,7 @@
 
     bool threadLoop() override;
     void requestExit() override;
+    status_t run(const char* name, int32_t priority = PRIORITY_DEFAULT, size_t stack = 0) override;
 
   private:
     // structure holding cached preview buffer info
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
index 5dbfb36..c968e44 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.cpp
@@ -18,12 +18,14 @@
 #define LOG_TAG "RingBufferConsumer"
 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
 
+#include <com_android_graphics_libgui_flags.h>
 #include <inttypes.h>
 
 #include <utils/Log.h>
 
-#include <gui/RingBufferConsumer.h>
 #include <camera/StringUtils.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/RingBufferConsumer.h>
 
 #define BI_LOGV(x, ...) ALOGV("[%s] " x, mName.c_str(), ##__VA_ARGS__)
 #define BI_LOGD(x, ...) ALOGD("[%s] " x, mName.c_str(), ##__VA_ARGS__)
@@ -38,13 +40,14 @@
 
 namespace android {
 
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+RingBufferConsumer::RingBufferConsumer(uint64_t consumerUsage, int bufferCount)
+    : ConsumerBase(), mBufferCount(bufferCount), mLatestTimestamp(0) {
+#else
 RingBufferConsumer::RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer,
-        uint64_t consumerUsage,
-        int bufferCount) :
-    ConsumerBase(consumer),
-    mBufferCount(bufferCount),
-    mLatestTimestamp(0)
-{
+                                       uint64_t consumerUsage, int bufferCount)
+    : ConsumerBase(consumer), mBufferCount(bufferCount), mLatestTimestamp(0) {
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
     mConsumer->setConsumerUsageBits(consumerUsage);
     mConsumer->setMaxAcquiredBufferCount(bufferCount);
 
@@ -317,7 +320,9 @@
 
         mLatestTimestamp = item.mTimestamp;
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_RING_BUFFER)
         item.mGraphicBuffer = mSlots[item.mSlot].mGraphicBuffer;
+#endif
     } // end of mMutex lock
 
     ConsumerBase::onFrameAvailable(item);
diff --git a/services/camera/libcameraservice/gui/RingBufferConsumer.h b/services/camera/libcameraservice/gui/RingBufferConsumer.h
index 2e523d1..9fdc996 100644
--- a/services/camera/libcameraservice/gui/RingBufferConsumer.h
+++ b/services/camera/libcameraservice/gui/RingBufferConsumer.h
@@ -17,9 +17,10 @@
 #ifndef ANDROID_GUI_RINGBUFFERCONSUMER_H
 #define ANDROID_GUI_RINGBUFFERCONSUMER_H
 
+#include <com_android_graphics_libgui_flags.h>
 #include <gui/BufferItem.h>
-#include <gui/ConsumerBase.h>
 #include <gui/BufferQueue.h>
+#include <gui/ConsumerBase.h>
 
 #include <utils/List.h>
 
@@ -58,8 +59,12 @@
     // the consumer usage flags passed to the graphics allocator. The
     // bufferCount parameter specifies how many buffers can be pinned for user
     // access at the same time.
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    RingBufferConsumer(uint64_t consumerUsage, int bufferCount);
+#else
     RingBufferConsumer(const sp<IGraphicBufferConsumer>& consumer, uint64_t consumerUsage,
             int bufferCount);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 
     virtual ~RingBufferConsumer();
 
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index d3b2a51..59e892f 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -26,6 +26,7 @@
 #include <hidl/HidlTransportSupport.h>
 
 #include <camera/CameraUtils.h>
+#include <utils/AttributionAndPermissionUtils.h>
 #include <utils/Utils.h>
 
 namespace android {
@@ -68,10 +69,15 @@
                                             getCameraCharacteristics_cb _hidl_cb) {
     android::CameraMetadata cameraMetadata;
     HStatus status = HStatus::NO_ERROR;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
     binder::Status serviceRet =
         mAidlICameraService->getCameraCharacteristics(cameraId,
                 /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, 0, &cameraMetadata);
+                clientAttribution, 0, &cameraMetadata);
     HCameraMetadata hidlMetadata;
     if (!serviceRet.isOk()) {
         switch(serviceRet.serviceSpecificErrorCode()) {
@@ -119,11 +125,17 @@
         return Void();
     }
     sp<hardware::camera2::ICameraDeviceCallbacks> callbacks = hybridCallbacks;
+    AttributionSourceState clientAttribution =
+            AttributionAndPermissionUtils::buildAttributionSource(
+                    hardware::ICameraService::USE_CALLING_PID,
+                    hardware::ICameraService::USE_CALLING_UID,
+                    kDefaultDeviceId);
+    clientAttribution.packageName = "";
+    clientAttribution.attributionTag = std::nullopt;
     binder::Status serviceRet = mAidlICameraService->connectDevice(
-            callbacks, cameraId, std::string(), {},
-            hardware::ICameraService::USE_CALLING_UID, 0/*oomScoreOffset*/,
+            callbacks, cameraId, 0/*oomScoreOffset*/,
             /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
-            kDefaultDeviceId, /*devicePolicy*/0, /*out*/&deviceRemote);
+            clientAttribution, /*devicePolicy*/0, /*out*/&deviceRemote);
     HStatus status = HStatus::NO_ERROR;
     if (!serviceRet.isOk()) {
         ALOGE("%s: Unable to connect to camera device", __FUNCTION__);
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
index c710671..53234f0 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/Android.bp
@@ -35,6 +35,7 @@
         "libmedia_headers",
     ],
     shared_libs: [
+        "framework-permission-aidl-cpp",
         "libbinder",
         "libbase",
         "libutils",
@@ -85,7 +86,7 @@
         "camera_service_fuzzer.cpp",
     ],
     defaults: [
-        "camera_service_fuzzer_defaults"
+        "camera_service_fuzzer_defaults",
     ],
 }
 
diff --git a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
index bce0faf..718e1d6 100644
--- a/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
+++ b/services/camera/libcameraservice/libcameraservice_fuzzer/camera_service_fuzzer.cpp
@@ -22,13 +22,15 @@
 //#define LOG_NDEBUG 0
 
 #include <CameraService.h>
-#include <device3/Camera3StreamInterface.h>
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
-#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/ICameraServiceListener.h>
+#include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
 #include <android/hardware/camera2/ICameraDeviceUser.h>
 #include <camera/CameraUtils.h>
 #include <camera/camera2/OutputConfiguration.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <device3/Camera3StreamInterface.h>
 #include <gui/BufferItemConsumer.h>
 #include <gui/IGraphicBufferProducer.h>
 #include <gui/Surface.h>
@@ -219,7 +221,9 @@
     } else {
         camType = kCamType[mFuzzedDataProvider->ConsumeBool()];
     }
-    mCameraService->getNumberOfCameras(camType, kDefaultDeviceId, /*devicePolicy*/0, &mNumCameras);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    mCameraService->getNumberOfCameras(camType, clientAttribution, /*devicePolicy*/0, &mNumCameras);
 }
 
 void CameraFuzzer::getCameraInformation(int32_t cameraId) {
@@ -238,14 +242,17 @@
     hardware::camera2::params::VendorTagDescriptorCache cache;
     mCameraService->getCameraVendorTagCache(&cache);
 
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+
     CameraInfo cameraInfo;
-    mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, kDefaultDeviceId,
+    mCameraService->getCameraInfo(cameraId, ROTATION_OVERRIDE_NONE, clientAttribution,
             /*devicePolicy*/0, &cameraInfo);
 
     CameraMetadata metadata;
     mCameraService->getCameraCharacteristics(cameraIdStr,
             /*targetSdkVersion*/__ANDROID_API_FUTURE__, ROTATION_OVERRIDE_NONE,
-            kDefaultDeviceId, /*devicePolicy*/0, &metadata);
+            clientAttribution, /*devicePolicy*/0, &metadata);
 }
 
 void CameraFuzzer::invokeCameraSound() {
@@ -327,13 +334,15 @@
     std::string cameraIdStr = std::to_string(cameraId);
     sp<IBinder> binder = new BBinder;
 
-    mCameraService->setTorchMode(cameraIdStr, true, binder, kDefaultDeviceId, /*devicePolicy*/0);
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    mCameraService->setTorchMode(cameraIdStr, true, binder, clientAttribution, /*devicePolicy*/0);
     ALOGV("Turned torch on.");
     int32_t torchStrength = rand() % 5 + 1;
     ALOGV("Changing torch strength level to %d", torchStrength);
     mCameraService->turnOnTorchWithStrengthLevel(cameraIdStr, torchStrength, binder,
-            kDefaultDeviceId, /*devicePolicy*/0);
-    mCameraService->setTorchMode(cameraIdStr, false, binder, kDefaultDeviceId, /*devicePolicy*/0);
+            clientAttribution, /*devicePolicy*/0);
+    mCameraService->setTorchMode(cameraIdStr, false, binder, clientAttribution, /*devicePolicy*/0);
     ALOGV("Turned torch off.");
 }
 
@@ -349,13 +358,15 @@
     ::android::binder::Status rc;
     sp<ICamera> cameraDevice;
 
-    rc = mCameraService->connect(this, cameraId, std::string(),
-                                 android::CameraService::USE_CALLING_UID,
-                                 android::CameraService::USE_CALLING_PID,
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+    clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+    rc = mCameraService->connect(this, cameraId,
                                  /*targetSdkVersion*/ __ANDROID_API_FUTURE__,
                                  ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
                                  /*forceSlowJpegMode*/false,
-                                 kDefaultDeviceId, /*devicePolicy*/0, &cameraDevice);
+                                 clientAttribution, /*devicePolicy*/0, &cameraDevice);
     if (!rc.isOk()) {
         // camera not connected
         return;
@@ -590,15 +601,37 @@
     for (auto s : statuses) {
         sp<TestCameraDeviceCallbacks> callbacks(new TestCameraDeviceCallbacks());
         sp<hardware::camera2::ICameraDeviceUser> device;
-        mCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+
+        AttributionSourceState clientAttribution;
+        clientAttribution.deviceId = kDefaultDeviceId;
+        clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+        clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+        mCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 ROTATION_OVERRIDE_OVERRIDE_TO_PORTRAIT,
-                kDefaultDeviceId, /*devicePolicy*/0, &device);
+                clientAttribution, /*devicePolicy*/0, &device);
         if (device == nullptr) {
             continue;
         }
         device->beginConfigure();
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+        sp<BufferItemConsumer> opaqueConsumer = new BufferItemConsumer(
+                GRALLOC_USAGE_SW_READ_NEVER, /*maxImages*/ 8, /*controlledByApp*/ true);
+        opaqueConsumer->setName(String8("Roger"));
+
+        // Set to VGA dimension for default, as that is guaranteed to be present
+        opaqueConsumer->setDefaultBufferSize(640, 480);
+        opaqueConsumer->setDefaultBufferFormat(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
+
+        sp<Surface> surface = opaqueConsumer->getSurface();
+
+        std::string noPhysicalId;
+        size_t rotations = sizeof(kRotations) / sizeof(int32_t) - 1;
+        sp<IGraphicBufferProducer> igbp = surface->getIGraphicBufferProducer();
+        OutputConfiguration output(
+                igbp, kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
+                noPhysicalId);
+#else
         sp<IGraphicBufferProducer> gbProducer;
         sp<IGraphicBufferConsumer> gbConsumer;
         BufferQueue::createBufferQueue(&gbProducer, &gbConsumer);
@@ -617,6 +650,7 @@
         OutputConfiguration output(gbProducer,
                 kRotations[mFuzzedDataProvider->ConsumeIntegralInRange<size_t>(0, rotations)],
                 noPhysicalId);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
         int streamId;
         device->createStream(output, &streamId);
         CameraMetadata sessionParams;
diff --git a/services/camera/libcameraservice/tests/Android.bp b/services/camera/libcameraservice/tests/Android.bp
index 55e2c9d..bbc10dc 100644
--- a/services/camera/libcameraservice/tests/Android.bp
+++ b/services/camera/libcameraservice/tests/Android.bp
@@ -30,7 +30,7 @@
         "ExifUtilsTest.cpp",
         "NV12Compressor.cpp",
         "RotateAndCropMapperTest.cpp",
-	"SessionStatsBuilderTest.cpp",
+        "SessionStatsBuilderTest.cpp",
         "ZoomRatioTest.cpp",
     ],
 
@@ -76,7 +76,7 @@
 
     defaults: [
         "libcameraservice_deps",
-	"cameraservice_test_hostsupported"
+        "cameraservice_test_hostsupported",
     ],
 
     // Only include libs that can't be run host-side here
@@ -84,6 +84,7 @@
         "libcutils",
         "libhidlbase",
         "libcamera_client",
+        "libgui",
         "libui",
         "android.companion.virtualdevice.flags-aconfig-cc",
         "android.hardware.camera.common@1.0",
@@ -108,6 +109,7 @@
 
     // Only include sources that can't be run host-side here
     srcs: [
+        "Camera3StreamSplitterTest.cpp",
         "CameraPermissionsTest.cpp",
         "CameraProviderManagerTest.cpp",
     ],
@@ -118,13 +120,13 @@
     name: "cameraservice_test_host",
 
     defaults: [
-        "cameraservice_test_hostsupported"
+        "cameraservice_test_hostsupported",
     ],
 
     include_dirs: [
         "frameworks/av/camera/include",
         "frameworks/av/camera/include/camera",
-        "frameworks/native/libs/binder/include_activitymanager"
+        "frameworks/native/libs/binder/include_activitymanager",
     ],
 
     // Only include libs that can't be run device-side here
diff --git a/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
new file mode 100644
index 0000000..3d49ae5
--- /dev/null
+++ b/services/camera/libcameraservice/tests/Camera3StreamSplitterTest.cpp
@@ -0,0 +1,169 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3StreamSplitterTest"
+// #define LOG_NDEBUG 0
+
+#include "../device3/Camera3StreamSplitter.h"
+
+#include <android/hardware_buffer.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/BufferItemConsumer.h>
+#include <gui/IGraphicBufferConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <gui/Surface.h>
+#include <ui/Fence.h>
+#include <ui/GraphicBuffer.h>
+#include <ui/GraphicBufferAllocator.h>
+#include <ui/PixelFormat.h>
+
+#include <system/window.h>
+#include <vndk/window.h>
+
+#include <gtest/gtest.h>
+
+using namespace android;
+
+namespace {
+
+uint64_t kConsumerUsage = AHARDWAREBUFFER_USAGE_CAMERA_READ;
+uint64_t kProducerUsage = AHARDWAREBUFFER_USAGE_CAMERA_READ;
+size_t kHalMaxBuffers = 3;
+uint32_t kWidth = 640;
+uint32_t kHeight = 480;
+PixelFormat kFormat = HAL_PIXEL_FORMAT_YCBCR_420_888;
+int64_t kDynamicRangeProfile = 0;
+
+std::tuple<sp<BufferItemConsumer>, sp<Surface>> createConsumerAndSurface() {
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+    sp<BufferItemConsumer> consumer = sp<BufferItemConsumer>::make(kConsumerUsage);
+    return {consumer, consumer->getSurface()};
+#else
+    sp<IGraphicBufferProducer> producer;
+    sp<IGraphicBufferConsumer> consumer;
+    BufferQueue::createBufferQueue(&producer, &consumer);
+
+    return {sp<BufferItemConsumer>::make(consumer, kConsumerUsage), sp<Surface>::make(producer)};
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+}
+
+class Camera3StreamSplitterTest : public testing::Test {
+  public:
+    void SetUp() override { mSplitter = sp<Camera3StreamSplitter>::make(); }
+
+  protected:
+    sp<Camera3StreamSplitter> mSplitter;
+};
+
+class TestSurfaceListener : public SurfaceListener {
+  public:
+    virtual void onBufferReleased() override { mNumBuffersReleased++; }
+    virtual bool needsReleaseNotify() { return true; }
+    virtual void onBufferDetached(int) override {}
+    virtual void onBuffersDiscarded(const std::vector<sp<GraphicBuffer>>&) override {};
+
+    uint32_t mNumBuffersReleased = 0;
+};
+
+class TestConsumerListener : public BufferItemConsumer::FrameAvailableListener {
+  public:
+    TestConsumerListener(const wp<BufferItemConsumer>& consumer) : mConsumer(consumer) {}
+
+    virtual void onFrameAvailable(const BufferItem&) {
+        sp<BufferItemConsumer> consumer = mConsumer.promote();
+        EXPECT_NE(nullptr, consumer);
+
+        BufferItem item;
+        EXPECT_EQ(OK, consumer->acquireBuffer(&item, 0));
+        mNumBuffersAcquired++;
+        EXPECT_EQ(OK, consumer->releaseBuffer(item, Fence::NO_FENCE));
+    }
+    virtual void onFrameReplaced(const BufferItem&) {}
+    virtual void onFrameDequeued(const uint64_t) {}
+    virtual void onFrameCancelled(const uint64_t) {}
+    virtual void onFrameDetached(const uint64_t) {}
+
+    wp<BufferItemConsumer> mConsumer;
+    uint32_t mNumBuffersAcquired = 0;
+};
+
+}  // namespace
+
+TEST_F(Camera3StreamSplitterTest, TestWithoutSurfaces_NoBuffersConsumed) {
+    sp<Surface> consumer;
+    EXPECT_EQ(OK, mSplitter->connect({}, kConsumerUsage, kProducerUsage, kHalMaxBuffers, kWidth,
+                                     kHeight, kFormat, &consumer, kDynamicRangeProfile));
+
+    sp<TestSurfaceListener> surfaceListener = sp<TestSurfaceListener>::make();
+    EXPECT_EQ(OK, consumer->connect(NATIVE_WINDOW_API_CAMERA, surfaceListener, false));
+
+    sp<GraphicBuffer> buffer = new GraphicBuffer(kWidth, kHeight, kFormat, kProducerUsage);
+    EXPECT_EQ(OK, consumer->attachBuffer(buffer->getNativeBuffer()));
+    // TODO: Do this with the surface itself once the API is available.
+    EXPECT_EQ(OK,
+              ANativeWindow_queueBuffer(consumer.get(), buffer->getNativeBuffer(), /*fenceFd*/ -1));
+
+    EXPECT_EQ(0u, surfaceListener->mNumBuffersReleased);
+}
+
+TEST_F(Camera3StreamSplitterTest, TestProcessSingleBuffer) {
+    //
+    // Set up output consumers:
+    //
+    constexpr auto kSurfaceId1 = 1;
+    auto [bufferItemConsumer1, surface1] = createConsumerAndSurface();
+    sp<TestConsumerListener> consumerListener1 =
+            sp<TestConsumerListener>::make(bufferItemConsumer1);
+    bufferItemConsumer1->setFrameAvailableListener(consumerListener1);
+
+    constexpr auto kSurfaceId2 = 2;
+    auto [bufferItemConsumer2, surface2] = createConsumerAndSurface();
+    sp<TestConsumerListener> consumerListener2 =
+            sp<TestConsumerListener>::make(bufferItemConsumer2);
+    bufferItemConsumer2->setFrameAvailableListener(consumerListener2);
+
+    //
+    // Connect it to the splitter, get the input surface, and set it up:
+    //
+    sp<Surface> inputSurface;
+    EXPECT_EQ(OK, mSplitter->connect({{kSurfaceId1, surface1}, {kSurfaceId2, surface2}},
+                                     kConsumerUsage, kProducerUsage, kHalMaxBuffers, kWidth,
+                                     kHeight, kFormat, &inputSurface, kDynamicRangeProfile));
+    sp<TestSurfaceListener> surfaceListener = sp<TestSurfaceListener>::make();
+    EXPECT_EQ(OK, inputSurface->connect(NATIVE_WINDOW_API_CAMERA, surfaceListener, false));
+    // TODO: Do this with the surface itself once the API is available.
+    EXPECT_EQ(OK, inputSurface->getIGraphicBufferProducer()->allowAllocation(false));
+
+    //
+    // Create a buffer to use:
+    //
+    sp<GraphicBuffer> singleBuffer = new GraphicBuffer(kWidth, kHeight, kFormat, kProducerUsage);
+    EXPECT_NE(nullptr, singleBuffer);
+    mSplitter->attachBufferToOutputs(singleBuffer->getNativeBuffer(), {kSurfaceId1, kSurfaceId2});
+
+    //
+    // Verify that when we attach the buffer, it's processed appropriately:
+    //
+    EXPECT_EQ(OK, inputSurface->attachBuffer(singleBuffer->getNativeBuffer()));
+    EXPECT_EQ(OK, mSplitter->getOnFrameAvailableResult());
+    // TODO: Do this with the surface itself once the API is available.
+    EXPECT_EQ(OK, ANativeWindow_queueBuffer(inputSurface.get(), singleBuffer->getNativeBuffer(),
+                                            /*fenceFd*/ -1));
+
+    EXPECT_EQ(1u, consumerListener1->mNumBuffersAcquired);
+    EXPECT_EQ(1u, consumerListener2->mNumBuffersAcquired);
+    EXPECT_EQ(1u, surfaceListener->mNumBuffersReleased);
+}
diff --git a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
index feb5540..50aeaca 100644
--- a/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
+++ b/services/camera/libcameraservice/tests/CameraPermissionsTest.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <android/content/AttributionSourceState.h>
 #include <android/hardware/BnCameraServiceListener.h>
 #include <android/hardware/BnCameraServiceProxy.h>
 #include <android/hardware/camera2/BnCameraDeviceCallbacks.h>
@@ -223,6 +224,11 @@
 // Test that camera connections fail with ERROR_DISABLED when the camera is disabled via device
 // policy, and succeed when it isn't.
 TEST_F(CameraPermissionsTest, TestCameraDisabled) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+    clientAttribution.pid = android::CameraService::USE_CALLING_PID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -233,11 +239,10 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &device);
+                clientAttribution, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(!status.isOk()) << "connectDevice returned OK status";
         ASSERT_EQ(status.serviceSpecificErrorCode(), hardware::ICameraService::ERROR_DISABLED)
@@ -249,11 +254,10 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> device;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &device);
+                clientAttribution, /*devicePolicy*/0, &device);
         AutoDisconnectDevice autoDisconnect(device);
         ASSERT_TRUE(status.isOk());
     }
@@ -261,6 +265,10 @@
 
 // Test that consecutive camera connections succeed.
 TEST_F(CameraPermissionsTest, TestConsecutiveConnections) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -270,20 +278,18 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
+                clientAttribution, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
+                clientAttribution, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
@@ -293,6 +299,10 @@
 // Test that consecutive camera connections succeed even when a nonzero oomScoreOffset is provided
 // in the second call.
 TEST_F(CameraPermissionsTest, TestConflictingOomScoreOffset) {
+    AttributionSourceState clientAttribution;
+    clientAttribution.deviceId = kDefaultDeviceId;
+    clientAttribution.uid = android::CameraService::USE_CALLING_UID;
+
     std::vector<hardware::CameraStatus> statuses;
     sp<TestCameraServiceListener> serviceListener = new TestCameraServiceListener();
     sCameraService->addListenerTest(serviceListener, &statuses);
@@ -302,20 +312,18 @@
         sp<TestCameraDeviceCallbacks> callbacks = new TestCameraDeviceCallbacks();
         sp<hardware::camera2::ICameraDeviceUser> deviceA, deviceB;
         binder::Status status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 0/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                0/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceA);
+                clientAttribution, /*devicePolicy*/0, &deviceA);
         AutoDisconnectDevice autoDisconnectA(deviceA);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
         status =
-                sCameraService->connectDevice(callbacks, s.cameraId, std::string(), {},
-                android::CameraService::USE_CALLING_UID, 1/*oomScoreDiff*/,
-                /*targetSdkVersion*/__ANDROID_API_FUTURE__,
+                sCameraService->connectDevice(callbacks, s.cameraId,
+                1/*oomScoreDiff*/, /*targetSdkVersion*/__ANDROID_API_FUTURE__,
                 hardware::ICameraService::ROTATION_OVERRIDE_NONE,
-                kDefaultDeviceId, /*devicePolicy*/0, &deviceB);
+                clientAttribution, /*devicePolicy*/0, &deviceB);
         AutoDisconnectDevice autoDisconnectB(deviceB);
         ASSERT_TRUE(status.isOk()) << "Exception code " << status.exceptionCode() <<
                 " service specific error code " << status.serviceSpecificErrorCode();
diff --git a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
index 4f238ab..4daab0f 100644
--- a/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
+++ b/services/camera/libcameraservice/utils/AttributionAndPermissionUtils.h
@@ -41,6 +41,20 @@
         mCameraService = cameraService;
     }
 
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
+        AttributionSourceState attributionSource{};
+        attributionSource.pid = callingPid;
+        attributionSource.uid = callingUid;
+        return attributionSource;
+    }
+
+    static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
+            int32_t deviceId) {
+        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
+        attributionSource.deviceId = deviceId;
+        return attributionSource;
+    }
+
     // Utilities handling Binder calling identities (previously in CameraThreadState)
     virtual int getCallingUid();
     virtual int getCallingPid();
@@ -123,17 +137,13 @@
             : mAttributionAndPermissionUtils(attributionAndPermissionUtils) { }
 
     static AttributionSourceState buildAttributionSource(int callingPid, int callingUid) {
-        AttributionSourceState attributionSource{};
-        attributionSource.pid = callingPid;
-        attributionSource.uid = callingUid;
-        return attributionSource;
+        return AttributionAndPermissionUtils::buildAttributionSource(callingPid, callingUid);
     }
 
     static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
             int32_t deviceId) {
-        AttributionSourceState attributionSource = buildAttributionSource(callingPid, callingUid);
-        attributionSource.deviceId = deviceId;
-        return attributionSource;
+        return AttributionAndPermissionUtils::buildAttributionSource(
+                callingPid, callingUid, deviceId);
     }
 
     static AttributionSourceState buildAttributionSource(int callingPid, int callingUid,
diff --git a/services/camera/virtualcamera/Android.bp b/services/camera/virtualcamera/Android.bp
index fc186fb..7ece0cb 100644
--- a/services/camera/virtualcamera/Android.bp
+++ b/services/camera/virtualcamera/Android.bp
@@ -65,14 +65,7 @@
 cc_library_static {
     name: "libvirtualcamera",
     srcs: [
-        "VirtualCameraProvider.cc",
-        "VirtualCameraDevice.cc",
-        "VirtualCameraSession.cc",
-        "VirtualCameraStream.cc",
-        "VirtualCameraService.cc",
-        "VirtualCameraSessionContext.cc",
-        "VirtualCameraTestInstance.cc",
-        "VirtualCameraRenderThread.cc",
+        "*.cc",
     ],
     defaults: [
         "libvirtualcamera_defaults",
diff --git a/services/camera/virtualcamera/README.md b/services/camera/virtualcamera/README.md
new file mode 100644
index 0000000..04b4811
--- /dev/null
+++ b/services/camera/virtualcamera/README.md
@@ -0,0 +1,164 @@
+# Virtual Camera
+
+The virtual camera feature allows a 3rd party application to expose a remote or
+virtual camera to the standard Android camera frameworks (Camera2/CameraX, NDK,
+camera1).
+
+The stack is composed of 4 different parts:
+
+1.  The **Virtual Camera Service** (this directory), which implements the Camera
+    HAL and acts as an interface between the Android Camera Server and the *Virtual
+    Camera Owner* (via the VirtualDeviceManager APIs).
+
+2.  The **VirtualDeviceManager** running in the system process and handling the
+    communication between the Virtual Camera service and the Virtual Camera
+    owner.
+
+3.  The **Virtual Camera Owner**, the client application declaring the Virtual
+    Camera and handling the production of image data. We will also refer to this
+    part as the **producer**.
+
+4.  The **Consumer Application**, the client application consuming camera data,
+    which can be any application using the camera APIs.
+
+This document describes the functionality of the *Virtual Camera Service*.
+
+## Before reading
+
+The service implements the Camera HAL. It's best to have a basic understanding
+of how it works, which you can get by reading the
+[HAL documentation](https://source.android.com/docs/core/camera) first.
+
+![](https://source.android.com/static/docs/core/camera/images/ape_fwk_camera2.png)
+
+The HAL implementations are declared in:
+
+-   [VirtualCameraDevice](./VirtualCameraDevice.h)
+-   [VirtualCameraProvider](./VirtualCameraProvider.h)
+-   [VirtualCameraSession](./VirtualCameraSession.h)
+
+## Current supported features
+
+Virtual Cameras report `EXTERNAL`
+[hardware level](https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL)
+but some
+[functionalities of `EXTERNAL`](https://developer.android.com/reference/android/hardware/camera2/CameraMetadata#INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL)
+hardware level are not fully supported.
+
+Here is a list of supported features:
+
+-   Single input, multiple output stream and capture:
+    -   Support for YUV and JPEG
+
+Notable missing features:
+
+-   Support for auto 3A (AWB, AE, AF): the virtual camera will announce
+    convergence of the 3A algorithm even though it can't receive any information
+    about this from the owner.
+
+-   No flash/torch support
+
+## Overview
+
+Graphic data are exchanged using the Surface infrastructure. Like any other
+Camera HAL, the Surfaces to write data into are received from the client.
+Virtual Camera exposes a **different** Surface onto which the owner can write
+data. In the middle, we use an EGL Texture which adapts (if needed) the producer
+data to the required consumer format (scaling only for now, but we might also
+add support for rotation and cropping in the future).
+
+When the client application requires multiple resolutions, the closest one among
+the supported resolutions is used for the input data, and the image data is
+downscaled for the lower resolutions.
+
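+As a minimal sketch of that selection (not the actual service code; the
+`Resolution` struct and `pickClosestResolution` helper below are hypothetical
+stand-ins, and "closest" is assumed here to mean the smallest difference in
+pixel count):
+
+```
+#include <algorithm>
+#include <cstdlib>
+#include <vector>
+
+struct Resolution {
+  int width;
+  int height;
+};
+
+// Returns the supported resolution whose pixel count is closest to the
+// requested one. Assumes `supported` is non-empty.
+Resolution pickClosestResolution(const Resolution& requested,
+                                 const std::vector<Resolution>& supported) {
+  auto distance = [&](const Resolution& r) {
+    return std::abs(r.width * r.height - requested.width * requested.height);
+  };
+  return *std::min_element(
+      supported.begin(), supported.end(),
+      [&](const Resolution& a, const Resolution& b) {
+        return distance(a) < distance(b);
+      });
+}
+```
+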
+Depending on the type of output, the rendering pipelines change. Here is an
+overview of the YUV and JPEG pipelines.
+
+**YUV Rendering:**
+
+```
+Virtual Device Owner Surface[1] (Producer) --{binds to}--> EGL
+Texture[1] --{renders into}--> Client Surface[1-n] (Consumer)
+```
+
+**JPEG Rendering:**
+
+```
+Virtual Device Owner Surface[1] (Producer) --{binds to}--> EGL
+Texture[1] --{compress data into}--> temporary buffer --{renders into}-->
+Client Surface[1-n] (Consumer)
+```
+
+## Life of a capture request
+
+> Before reading the following, you must understand the concepts of
+> [CaptureRequest](https://developer.android.com/reference/android/hardware/camera2/CaptureRequest)
+> and
+> [OutputConfiguration](https://developer.android.com/reference/android/hardware/camera2/OutputConfiguration).
+
+1.  The consumer creates a session with one or more `Surfaces`
+
+2.  The VirtualCamera owner will receive a call to
+    `VirtualCameraCallback#onStreamConfigured` with a reference to another
+    `Surface` into which it can write.
+
+3.  The consumer will then start sending `CaptureRequests`. The owner will
+    receive a call to `VirtualCameraCallback#onProcessCaptureRequest`, at which
+    point it should write the required data into the surface it previously
+    received. At the same time, a new task will be enqueued in the render thread.
+
+4.  The [VirtualCameraRenderThread](./VirtualCameraRenderThread.cc) will consume
+    the enqueued tasks as they come. It will wait for the producer to write into
+    the input Surface (using `Surface::waitForNextFrame`). A simplified sketch of
+    this loop is shown right after this list.
+
+    > **Note:** Since the Surface API allows us to wait for the next frame,
+    > there is no need for the producer to notify when the frame is ready by
+    > calling a `processCaptureResult()` equivalent.
+
+5.  The EGL Texture is updated with the content of the Surface.
+
+6.  The EGL Texture renders into the output Surfaces.
+
+7.  The Camera client is notified of the "shutter" event and the `CaptureResult`
+    is sent to the consumer.
+
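+To make the ordering of steps 4–7 concrete, here is a minimal, illustrative
+skeleton of that render loop. It is not the actual `VirtualCameraRenderThread` /
+`ProcessCaptureRequestTask` code; the `CaptureTask` and `Hooks` types below are
+hypothetical stand-ins for the real collaborators:
+
+```
+#include <cstdint>
+#include <functional>
+
+struct CaptureTask {
+  int64_t frameNumber;
+};
+
+// Hypothetical callbacks standing in for the real render-thread collaborators.
+struct Hooks {
+  std::function<bool(CaptureTask*)> waitForNextTask;   // step 4: dequeue a task
+  std::function<void()> waitForInputFrame;             // producer wrote a frame
+  std::function<void()> updateEglTexture;              // step 5
+  std::function<void()> renderToOutputSurfaces;        // step 6
+  std::function<void(int64_t)> notifyShutter;          // step 7: shutter event
+  std::function<void(int64_t)> sendCaptureResult;      // step 7: capture result
+};
+
+void renderLoop(const Hooks& hooks) {
+  CaptureTask task;
+  while (hooks.waitForNextTask(&task)) {
+    hooks.waitForInputFrame();
+    hooks.updateEglTexture();
+    hooks.renderToOutputSurfaces();
+    hooks.notifyShutter(task.frameNumber);
+    hooks.sendCaptureResult(task.frameNumber);
+  }
+}
+```
+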
+## EGL Rendering
+
+### The render thread
+
+The [VirtualCameraRenderThread](./VirtualCameraRenderThread.h) module takes care
+of rendering the input from the owner to the output via the EGL Texture. The
+rendering is done either to a JPEG buffer (the BLOB rendering used to create a
+JPEG) or to a YUV buffer (used mainly for preview Surfaces or video).
+Two EGLPrograms (shaders) defined in [EglProgram](./util/EglProgram.cc) handle
+the rendering of the data.
+
+### Initialization
+
+[EglDisplayContext](./util/EglDisplayContext.h) initializes the whole EGL
+environment (Display, Surface, Context, and Config).
+
+The EGL rendering is backed by an
+[ANativeWindow](https://developer.android.com/ndk/reference/group/a-native-window),
+which is just the native counterpart of the
+[Surface](https://developer.android.com/reference/android/view/Surface), which
+itself is the producer side of a buffer queue, the consumer being either the
+display (Camera preview) or some encoder (to save the data or send it across the
+network).
+
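+For readers unfamiliar with EGL, the following is a minimal, self-contained
+sketch of the kind of bootstrapping `EglDisplayContext` performs. It is
+illustrative only: the real code chooses `EGL_PBUFFER_BIT` or `EGL_WINDOW_BIT`
+depending on whether an `ANativeWindow` was supplied, and uses its own attribute
+values.
+
+```
+#include <EGL/egl.h>
+
+// Minimal off-screen EGL bootstrap: display, config, context, surface,
+// then make the whole configuration "current".
+bool initOffscreenEgl() {
+  EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
+  if (display == EGL_NO_DISPLAY || !eglInitialize(display, nullptr, nullptr)) {
+    return false;
+  }
+
+  const EGLint configAttribs[] = {EGL_SURFACE_TYPE, EGL_PBUFFER_BIT,
+                                  EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+                                  EGL_RED_SIZE, 8, EGL_GREEN_SIZE, 8,
+                                  EGL_BLUE_SIZE, 8, EGL_NONE};
+  EGLConfig config;
+  EGLint numConfigs = 0;
+  if (!eglChooseConfig(display, configAttribs, &config, 1, &numConfigs) ||
+      numConfigs == 0) {
+    return false;
+  }
+
+  const EGLint contextAttribs[] = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE};
+  EGLContext context =
+      eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttribs);
+  if (context == EGL_NO_CONTEXT) {
+    return false;
+  }
+
+  const EGLint pbufferAttribs[] = {EGL_WIDTH, 1, EGL_HEIGHT, 1, EGL_NONE};
+  EGLSurface surface = eglCreatePbufferSurface(display, config, pbufferAttribs);
+
+  return surface != EGL_NO_SURFACE &&
+         eglMakeCurrent(display, surface, surface, context) == EGL_TRUE;
+}
+```
+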
+### More about OpenGL
+
+To better understand how the EGL rendering works, the following resources can be
+used:
+
+Introduction to OpenGL: https://learnopengl.com/
+
+The official documentation of the EGL API can be queried at:
+https://www.khronos.org/registry/egl/sdk/docs/man/xhtml/
+
+Or by using a Google search with the following query:
+
+```
+[function name] site:https://registry.khronos.org/EGL/sdk/docs/man/html/
+
+// example: eglSwapBuffers site:https://registry.khronos.org/EGL/sdk/docs/man/html/
+```
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureRequest.h b/services/camera/virtualcamera/VirtualCameraCaptureRequest.h
new file mode 100644
index 0000000..cf5402e
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureRequest.h
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
+
+#include "VirtualCameraDevice.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Struct used to pass request settings in the different part of
+// the virtual camera system.
+struct RequestSettings {
+  // JPEG_QUALITY metadata
+  int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+
+  // JPEG_ORIENTATION metadata
+  int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
+
+  // JPEG_THUMBNAIL_SIZE metadata
+  Resolution thumbnailResolution = Resolution(0, 0);
+
+  // JPEG_THUMBNAIL_QUALITY metadata
+  int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
+
+  // ANDROID_CONTROL_AE_TARGET_FPS_RANGE metadata
+  std::optional<FpsRange> fpsRange;
+
+  // CONTROL_CAPTURE_INTENT metadata
+  camera_metadata_enum_android_control_capture_intent_t captureIntent =
+      VirtualCameraDevice::kDefaultCaptureIntent;
+
+  // JPEG_GPS_LOCATION metadata
+  std::optional<GpsCoordinates> gpsCoordinates;
+
+  // CONTROL_AE_PRECAPTURE_TRIGGER metadata
+  std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
+      aePrecaptureTrigger;
+};
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTUREREQUEST_H
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.cc b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
new file mode 100644
index 0000000..a61f553
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.cc
@@ -0,0 +1,119 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "VirtualCameraCaptureResult.h"
+
+#include <cstdint>
+
+#include "VirtualCameraCaptureRequest.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+#include "util/MetadataUtil.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+using ::aidl::android::hardware::camera::device::CameraMetadata;
+namespace {
+// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
+// This roughly corresponds to frame latency; we set it to the
+// documented minimum of 2.
+static constexpr uint8_t kPipelineDepth = 2;
+
+}  // namespace
+
+CameraMetadata createCaptureResultMetadata(
+    const std::chrono::nanoseconds timestamp,
+    const RequestSettings& requestSettings,
+    const Resolution reportedSensorSize) {
+  // All of the keys used in the response need to be referenced in
+  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
+  // in VirtualCameraDevice.cc).
+  MetadataBuilder builder =
+      MetadataBuilder()
+          .setAberrationCorrectionMode(
+              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
+          .setControlAeAvailableAntibandingModes(
+              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
+          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
+          .setControlAeExposureCompensation(0)
+          .setControlAeLockAvailable(false)
+          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
+          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
+          .setControlAePrecaptureTrigger(
+              // Limited devices are expected to have precapture ae enabled and
+              // respond to a cancellation request. Since we don't actually support
+              // AE at all, let's just respect the cancellation expectation in
+              // case it's requested
+              requestSettings.aePrecaptureTrigger ==
+                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
+                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
+          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
+          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
+          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
+          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
+          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
+          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
+          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
+          .setControlCaptureIntent(requestSettings.captureIntent)
+          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
+          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
+          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
+          .setControlVideoStabilizationMode(
+              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
+          .setCropRegion(0, 0, reportedSensorSize.width,
+                         reportedSensorSize.height)
+          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
+          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
+          .setFlashMode(ANDROID_FLASH_MODE_OFF)
+          .setFocalLength(VirtualCameraDevice::kFocalLength)
+          .setJpegQuality(requestSettings.jpegQuality)
+          .setJpegOrientation(requestSettings.jpegOrientation)
+          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
+                                requestSettings.thumbnailResolution.height)
+          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
+          .setLensOpticalStabilizationMode(
+              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
+          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
+          .setPipelineDepth(kPipelineDepth)
+          .setSensorTimestamp(timestamp)
+          .setStatisticsHotPixelMapMode(
+              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
+          .setStatisticsLensShadingMapMode(
+              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
+          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
+
+  if (requestSettings.fpsRange.has_value()) {
+    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
+  }
+
+  if (requestSettings.gpsCoordinates.has_value()) {
+    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
+    builder.setJpegGpsCoordinates(coordinates);
+  }
+
+  std::unique_ptr<CameraMetadata> metadata = builder.build();
+
+  if (metadata == nullptr) {
+    ALOGE("%s: Failed to build capture result metadata", __func__);
+    return CameraMetadata();
+  }
+  return std::move(*metadata);
+}
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraCaptureResult.h b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
new file mode 100644
index 0000000..9e5b4d7
--- /dev/null
+++ b/services/camera/virtualcamera/VirtualCameraCaptureResult.h
@@ -0,0 +1,53 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
+#define ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
+
+#include <chrono>
+#include <cstdint>
+#include <cstring>
+#include <future>
+#include <memory>
+#include <mutex>
+#include <thread>
+#include <utility>
+#include <vector>
+
+#include "Exif.h"
+#include "GLES/gl.h"
+#include "VirtualCameraCaptureRequest.h"
+#include "VirtualCameraDevice.h"
+#include "VirtualCameraRenderThread.h"
+#include "VirtualCameraSessionContext.h"
+#include "aidl/android/hardware/camera/device/CameraMetadata.h"
+
+namespace android {
+namespace companion {
+namespace virtualcamera {
+
+// Construct the metadata for the capture result based on the request
+// settings, timestamp, and reported sensor size.
+::aidl::android::hardware::camera::device::CameraMetadata
+createCaptureResultMetadata(std::chrono::nanoseconds timestamp,
+                            const RequestSettings& requestSettings,
+                            Resolution reportedSensorSize);
+
+}  // namespace virtualcamera
+}  // namespace companion
+}  // namespace android
+
+#endif  // ANDROID_COMPANION_VIRTUALCAMERA_VIRTUALCAMERACAPTURERESULT_H
\ No newline at end of file
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.cc b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
index cd17517..40a96e4 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.cc
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.cc
@@ -29,6 +29,7 @@
 
 #include "Exif.h"
 #include "GLES/gl.h"
+#include "VirtualCameraCaptureResult.h"
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/hardware/camera/common/Status.h"
@@ -92,95 +93,10 @@
 
 static constexpr std::chrono::milliseconds kAcquireFenceTimeout = 500ms;
 
-// See REQUEST_PIPELINE_DEPTH in CaptureResult.java.
-// This roughly corresponds to frame latency, we set to
-// documented minimum of 2.
-static constexpr uint8_t kPipelineDepth = 2;
-
 static constexpr size_t kJpegThumbnailBufferSize = 32 * 1024;  // 32 KiB
 
 static constexpr UpdateTextureTask kUpdateTextureTask;
 
-CameraMetadata createCaptureResultMetadata(
-    const std::chrono::nanoseconds timestamp,
-    const RequestSettings& requestSettings,
-    const Resolution reportedSensorSize) {
-  // All of the keys used in the response needs to be referenced in
-  // availableResultKeys in CameraCharacteristics (see initCameraCharacteristics
-  // in VirtualCameraDevice.cc).
-  MetadataBuilder builder =
-      MetadataBuilder()
-          .setAberrationCorrectionMode(
-              ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF)
-          .setControlAeAvailableAntibandingModes(
-              {ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF})
-          .setControlAeAntibandingMode(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF)
-          .setControlAeExposureCompensation(0)
-          .setControlAeLockAvailable(false)
-          .setControlAeLock(ANDROID_CONTROL_AE_LOCK_OFF)
-          .setControlAeMode(ANDROID_CONTROL_AE_MODE_ON)
-          .setControlAePrecaptureTrigger(
-              // Limited devices are expected to have precapture ae enabled and
-              // respond to cancellation request. Since we don't actuall support
-              // AE at all, let's just respect the cancellation expectation in
-              // case it's requested
-              requestSettings.aePrecaptureTrigger ==
-                      ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
-                  ? ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_CANCEL
-                  : ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE)
-          .setControlAeState(ANDROID_CONTROL_AE_STATE_INACTIVE)
-          .setControlAfMode(ANDROID_CONTROL_AF_MODE_OFF)
-          .setControlAfTrigger(ANDROID_CONTROL_AF_TRIGGER_IDLE)
-          .setControlAfState(ANDROID_CONTROL_AF_STATE_INACTIVE)
-          .setControlAwbMode(ANDROID_CONTROL_AWB_MODE_AUTO)
-          .setControlAwbLock(ANDROID_CONTROL_AWB_LOCK_OFF)
-          .setControlAwbState(ANDROID_CONTROL_AWB_STATE_INACTIVE)
-          .setControlCaptureIntent(requestSettings.captureIntent)
-          .setControlEffectMode(ANDROID_CONTROL_EFFECT_MODE_OFF)
-          .setControlMode(ANDROID_CONTROL_MODE_AUTO)
-          .setControlSceneMode(ANDROID_CONTROL_SCENE_MODE_DISABLED)
-          .setControlVideoStabilizationMode(
-              ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF)
-          .setCropRegion(0, 0, reportedSensorSize.width,
-                         reportedSensorSize.height)
-          .setFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF)
-          .setFlashState(ANDROID_FLASH_STATE_UNAVAILABLE)
-          .setFlashMode(ANDROID_FLASH_MODE_OFF)
-          .setFocalLength(VirtualCameraDevice::kFocalLength)
-          .setJpegQuality(requestSettings.jpegQuality)
-          .setJpegOrientation(requestSettings.jpegOrientation)
-          .setJpegThumbnailSize(requestSettings.thumbnailResolution.width,
-                                requestSettings.thumbnailResolution.height)
-          .setJpegThumbnailQuality(requestSettings.thumbnailJpegQuality)
-          .setLensOpticalStabilizationMode(
-              ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF)
-          .setNoiseReductionMode(ANDROID_NOISE_REDUCTION_MODE_OFF)
-          .setPipelineDepth(kPipelineDepth)
-          .setSensorTimestamp(timestamp)
-          .setStatisticsHotPixelMapMode(
-              ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF)
-          .setStatisticsLensShadingMapMode(
-              ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF)
-          .setStatisticsSceneFlicker(ANDROID_STATISTICS_SCENE_FLICKER_NONE);
-
-  if (requestSettings.fpsRange.has_value()) {
-    builder.setControlAeTargetFpsRange(requestSettings.fpsRange.value());
-  }
-
-  if (requestSettings.gpsCoordinates.has_value()) {
-    const GpsCoordinates& coordinates = requestSettings.gpsCoordinates.value();
-    builder.setJpegGpsCoordinates(coordinates);
-  }
-
-  std::unique_ptr<CameraMetadata> metadata = builder.build();
-
-  if (metadata == nullptr) {
-    ALOGE("%s: Failed to build capture result metadata", __func__);
-    return CameraMetadata();
-  }
-  return std::move(*metadata);
-}
-
 NotifyMsg createShutterNotifyMsg(int frameNumber,
                                  std::chrono::nanoseconds timestamp) {
   NotifyMsg msg;
@@ -679,7 +595,7 @@
     return {};
   }
 
-  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
+  // TODO(b/324383963) Add support for letterboxing if the thumbnail size
   // doesn't correspond
   //  to input texture aspect ratio.
   if (!renderIntoEglFramebuffer(*framebuffer, /*fence=*/nullptr,
@@ -753,6 +669,7 @@
   PlanesLockGuard planesLock(hwBuffer, AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN,
                              fence);
   if (planesLock.getStatus() != OK) {
+    ALOGE("Failed to lock hwBuffer planes");
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
@@ -760,23 +677,35 @@
       createExif(Resolution(stream->width, stream->height), resultMetadata,
                  createThumbnail(requestSettings.thumbnailResolution,
                                  requestSettings.thumbnailJpegQuality));
+
+  unsigned long outBufferSize = stream->bufferSize - sizeof(CameraBlob);
+  void* outBuffer = (*planesLock).planes[0].data;
   std::optional<size_t> compressedSize = compressJpeg(
       stream->width, stream->height, requestSettings.jpegQuality,
-      framebuffer->getHardwareBuffer(), app1ExifData,
-      stream->bufferSize - sizeof(CameraBlob), (*planesLock).planes[0].data);
+      framebuffer->getHardwareBuffer(), app1ExifData, outBufferSize, outBuffer);
 
   if (!compressedSize.has_value()) {
     ALOGE("%s: Failed to compress JPEG image", __func__);
     return cameraStatus(Status::INTERNAL_ERROR);
   }
 
+  // Add the transport header at the end of the JPEG output buffer.
+  //
+  // jpegBlobId must start at byte[buffer_size - sizeof(CameraBlob)],
+  // where buffer_size is the size of the gralloc buffer.
+  //
+  // See
+  // hardware/interfaces/camera/device/aidl/android/hardware/camera/device/CameraBlobId.aidl
+  // for the full explanation of the following code.
   CameraBlob cameraBlob{
       .blobId = CameraBlobId::JPEG,
       .blobSizeBytes = static_cast<int32_t>(compressedSize.value())};
 
-  memcpy(reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
-             (stream->bufferSize - sizeof(cameraBlob)),
-         &cameraBlob, sizeof(cameraBlob));
+  // Copy the cameraBlob to the end of the JPEG buffer.
+  uint8_t* jpegStreamEndAddress =
+      reinterpret_cast<uint8_t*>((*planesLock).planes[0].data) +
+      (stream->bufferSize - sizeof(cameraBlob));
+  memcpy(jpegStreamEndAddress, &cameraBlob, sizeof(cameraBlob));
 
   ALOGV("%s: Successfully compressed JPEG image, resulting size %zu B",
         __func__, compressedSize.value());
diff --git a/services/camera/virtualcamera/VirtualCameraRenderThread.h b/services/camera/virtualcamera/VirtualCameraRenderThread.h
index 5a5966b..aafed44 100644
--- a/services/camera/virtualcamera/VirtualCameraRenderThread.h
+++ b/services/camera/virtualcamera/VirtualCameraRenderThread.h
@@ -26,6 +26,7 @@
 #include <variant>
 #include <vector>
 
+#include "VirtualCameraCaptureRequest.h"
 #include "VirtualCameraDevice.h"
 #include "VirtualCameraSessionContext.h"
 #include "aidl/android/hardware/camera/device/CameraMetadata.h"
@@ -56,19 +57,6 @@
   const sp<Fence> mFence;
 };
 
-struct RequestSettings {
-  int jpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
-  int jpegOrientation = VirtualCameraDevice::kDefaultJpegOrientation;
-  Resolution thumbnailResolution = Resolution(0, 0);
-  int thumbnailJpegQuality = VirtualCameraDevice::kDefaultJpegQuality;
-  std::optional<FpsRange> fpsRange;
-  camera_metadata_enum_android_control_capture_intent_t captureIntent =
-      VirtualCameraDevice::kDefaultCaptureIntent;
-  std::optional<GpsCoordinates> gpsCoordinates;
-  std::optional<camera_metadata_enum_android_control_ae_precapture_trigger>
-      aePrecaptureTrigger;
-};
-
 // Represents single capture request to fill set of buffers.
 class ProcessCaptureRequestTask {
  public:
diff --git a/services/camera/virtualcamera/VirtualCameraSession.cc b/services/camera/virtualcamera/VirtualCameraSession.cc
index e1815c7..88929cc 100644
--- a/services/camera/virtualcamera/VirtualCameraSession.cc
+++ b/services/camera/virtualcamera/VirtualCameraSession.cc
@@ -72,7 +72,6 @@
 namespace companion {
 namespace virtualcamera {
 
-using ::aidl::android::companion::virtualcamera::Format;
 using ::aidl::android::companion::virtualcamera::IVirtualCameraCallback;
 using ::aidl::android::companion::virtualcamera::SupportedStreamConfiguration;
 using ::aidl::android::hardware::camera::common::Status;
@@ -87,7 +86,6 @@
 using ::aidl::android::hardware::camera::device::Stream;
 using ::aidl::android::hardware::camera::device::StreamBuffer;
 using ::aidl::android::hardware::camera::device::StreamConfiguration;
-using ::aidl::android::hardware::camera::device::StreamRotation;
 using ::aidl::android::hardware::common::fmq::MQDescriptor;
 using ::aidl::android::hardware::common::fmq::SynchronizedReadWrite;
 using ::aidl::android::hardware::graphics::common::BufferUsage;
diff --git a/services/camera/virtualcamera/util/EglDisplayContext.cc b/services/camera/virtualcamera/util/EglDisplayContext.cc
index 70da25b..ccd0d71 100644
--- a/services/camera/virtualcamera/util/EglDisplayContext.cc
+++ b/services/camera/virtualcamera/util/EglDisplayContext.cc
@@ -54,7 +54,9 @@
   EGLint numConfigs = 0;
   EGLint configAttribs[] = {
       EGL_SURFACE_TYPE,
-      nativeWindow == nullptr ? EGL_PBUFFER_BIT : EGL_WINDOW_BIT,
+      nativeWindow == nullptr
+          ? EGL_PBUFFER_BIT  // Render into individual AHardwareBuffer
+          : EGL_WINDOW_BIT,  // Render into Surface (ANativeWindow)
       EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_RED_SIZE, 8, EGL_GREEN_SIZE,
       8, EGL_BLUE_SIZE, 8,
       // no alpha
@@ -83,6 +85,9 @@
     }
   }
 
+  // EGL is a big state machine. Now that we have a configuration ready, we set
+  // this state machine to that configuration (we make it the "current"
+  // configuration).
   if (!makeCurrent()) {
     ALOGE(
         "Failed to set newly initialized EGLContext and EGLDisplay connection "
diff --git a/services/camera/virtualcamera/util/EglProgram.cc b/services/camera/virtualcamera/util/EglProgram.cc
index 28f04cf..eda4169 100644
--- a/services/camera/virtualcamera/util/EglProgram.cc
+++ b/services/camera/virtualcamera/util/EglProgram.cc
@@ -96,6 +96,7 @@
       fragColor = texture(uTexture, vTextureCoord);
     })";
 
+// Shader to render an RGBA texture into a YUV buffer.
 constexpr char kExternalRgbaTextureFragmentShader[] = R"(#version 300 es
     #extension GL_OES_EGL_image_external_essl3 : require
     #extension GL_EXT_YUV_target : require
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.cc b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
index c81d36d..98bf62a 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.cc
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.cc
@@ -18,19 +18,27 @@
 #include "utils/Timers.h"
 #define LOG_TAG "EglSurfaceTexture"
 
+#include <GLES/gl.h>
+#include <com_android_graphics_libgui_flags.h>
+#include <gui/BufferQueue.h>
+#include <gui/GLConsumer.h>
+#include <gui/IGraphicBufferProducer.h>
+#include <hardware/gralloc.h>
+
 #include <cstdint>
 
 #include "EglSurfaceTexture.h"
 #include "EglUtil.h"
-#include "GLES/gl.h"
-#include "gui/BufferQueue.h"
-#include "gui/GLConsumer.h"
-#include "gui/IGraphicBufferProducer.h"
-#include "hardware/gralloc.h"
 
 namespace android {
 namespace companion {
 namespace virtualcamera {
+namespace {
+
+// Maximal number of buffers the producer can dequeue without blocking.
+constexpr int kBufferProducerMaxDequeueBufferCount = 64;
+
+}  // namespace
 
 EglSurfaceTexture::EglSurfaceTexture(const uint32_t width, const uint32_t height)
     : mWidth(width), mHeight(height) {
@@ -39,7 +47,23 @@
     ALOGE("Failed to generate texture");
     return;
   }
+
+#if COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+  mGlConsumer = sp<GLConsumer>::make(mTextureId, GLConsumer::TEXTURE_EXTERNAL,
+                                     false, false);
+  mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
+  mGlConsumer->setDefaultBufferSize(mWidth, mHeight);
+  mGlConsumer->setConsumerUsageBits(GRALLOC_USAGE_HW_TEXTURE);
+  mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
+
+  mSurface = mGlConsumer->getSurface();
+  mSurface->setMaxDequeuedBufferCount(kBufferProducerMaxDequeueBufferCount);
+#else
   BufferQueue::createBufferQueue(&mBufferProducer, &mBufferConsumer);
+  // Set max dequeue buffer count for producer to maximal value to prevent
+  // blocking when dequeuing input buffers.
+  mBufferProducer->setMaxDequeuedBufferCount(
+      kBufferProducerMaxDequeueBufferCount);
   mGlConsumer = sp<GLConsumer>::make(
       mBufferConsumer, mTextureId, GLConsumer::TEXTURE_EXTERNAL, false, false);
   mGlConsumer->setName(String8("VirtualCameraEglSurfaceTexture"));
@@ -48,6 +72,7 @@
   mGlConsumer->setDefaultBufferFormat(AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420);
 
   mSurface = sp<Surface>::make(mBufferProducer);
+#endif  // COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 }
 
 EglSurfaceTexture::~EglSurfaceTexture() {
@@ -75,7 +100,26 @@
 }
 
 GLuint EglSurfaceTexture::updateTexture() {
-  mGlConsumer->updateTexImage();
+  int previousFrameId;
+  int framesAdvance = 0;
+  // Consume buffers one at a time.
+  // Contrary to the code comments in GLConsumer, the GLConsumer acquires the
+  // next queued buffer (not the most recently queued buffer).
+  while (true) {
+    previousFrameId = mGlConsumer->getFrameNumber();
+    mGlConsumer->updateTexImage();
+    int currentFrameId = mGlConsumer->getFrameNumber();
+    if (previousFrameId == currentFrameId) {
+      // Frame number didn't change after updating the texture; this means
+      // we're at the end of the queue and the currently attached buffer is
+      // the most recent one.
+      break;
+    }
+
+    framesAdvance++;
+    previousFrameId = currentFrameId;
+  }
+  ALOGV("%s: Advanced %d frames", __func__, framesAdvance);
   return mTextureId;
 }
 
diff --git a/services/camera/virtualcamera/util/EglSurfaceTexture.h b/services/camera/virtualcamera/util/EglSurfaceTexture.h
index ac3cf7d..a46af8f 100644
--- a/services/camera/virtualcamera/util/EglSurfaceTexture.h
+++ b/services/camera/virtualcamera/util/EglSurfaceTexture.h
@@ -17,18 +17,21 @@
 #ifndef ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 #define ANDROID_COMPANION_VIRTUALCAMERA_EGLSURFACETEXTURE_H
 
+#include <GLES/gl.h>
+#include <gui/ConsumerBase.h>
+#include <gui/Surface.h>
+#include <utils/RefBase.h>
+
 #include <chrono>
 #include <cstdint>
 
-#include "GLES/gl.h"
-#include "gui/ConsumerBase.h"
-#include "gui/Surface.h"
-#include "utils/RefBase.h"
-
 namespace android {
 
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
 class IGraphicBufferProducer;
 class IGraphicBufferConsumer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
+
 class GLConsumer;
 
 namespace companion {
@@ -80,8 +83,10 @@
   std::array<float, 16> getTransformMatrix();
 
  private:
+#if !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
   sp<IGraphicBufferProducer> mBufferProducer;
   sp<IGraphicBufferConsumer> mBufferConsumer;
+#endif  // !COM_ANDROID_GRAPHICS_LIBGUI_FLAGS(WB_CONSUMER_BASE_OWNS_BQ)
   sp<GLConsumer> mGlConsumer;
   sp<Surface> mSurface;
   GLuint mTextureId;
diff --git a/services/tuner/Android.bp b/services/tuner/Android.bp
index e29d520..5d7bf68 100644
--- a/services/tuner/Android.bp
+++ b/services/tuner/Android.bp
@@ -15,7 +15,7 @@
     imports: [
         "android.hardware.common-V2",
         "android.hardware.common.fmq-V1",
-        "android.hardware.tv.tuner-V2",
+        "android.hardware.tv.tuner-V3",
     ],
     backend: {
         java: {
@@ -41,7 +41,7 @@
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
-        "android.hardware.tv.tuner-V2-ndk",
+        "android.hardware.tv.tuner-V3-ndk",
         "libbase",
         "libbinder",
         "libbinder_ndk",
@@ -84,7 +84,7 @@
     shared_libs: [
         "android.hardware.tv.tuner@1.0",
         "android.hardware.tv.tuner@1.1",
-        "android.hardware.tv.tuner-V2-ndk",
+        "android.hardware.tv.tuner-V3-ndk",
         "libbase",
         "libcutils",
         "libbinder",